diff --git a/.gitignore b/.gitignore
index 9c58933ad..bbff03815 100644
--- a/.gitignore
+++ b/.gitignore
@@ -68,6 +68,7 @@ src/qt/interchained-qt.includes
 *.rej
 *.orig
 *.pyc
+__pycache__/
 *.o
 *.o-*
 *.a
diff --git a/ambassadors/interchained-node-operator-portal/README.md b/ambassadors/interchained-node-operator-portal/README.md
new file mode 100644
index 000000000..0f4519c08
--- /dev/null
+++ b/ambassadors/interchained-node-operator-portal/README.md
@@ -0,0 +1,97 @@
+# Interchained Operator Control Plane
+
+An enterprise-grade SaaS portal for managing Interchained node operators. The
+platform combines a FastAPI backend, Redis DB 6 telemetry store, and a Next.js
+frontend to deliver RBAC, analytics, automated rewards, and compliance tooling
+for distributed infrastructure teams.
+
+## Backend
+
+* Framework: FastAPI
+* Data store: Redis DB 6
+* Location: `backend/`
+
+### Highlights
+
+* **Multi-tenant RBAC** – Super admins provision organisations, org admins manage
+  operators/auditors through invite workflows, and all access is tokenised.
+* **Node lifecycle management** – Register, tag, and flag nodes per
+  organisation. Background monitors capture uptime, latency, and block height
+  every 60 seconds.
+* **Reward distribution** – Daily payouts weight uptime scores with service plan
+  multipliers, taper shares for P2P-only seed nodes, and persist
+  organisation-level history plus lifetime accruals.
+* **Compliance exports** – Org and super admins can download CSV reward ledgers
+  with wallet destinations, daily shares, and pending balances for payout ops.
+* **Analytics & billing** – API endpoints expose fleet metrics (MRR, plan
+  distribution, uptime trends) and tenant billing summaries.
+* **Audit logging** – Immutable audit events for invitations, role updates,
+  organisation changes, and node flagging.
+
+### Running locally
+
+```bash
+cd backend
+python -m venv .venv
+source .venv/bin/activate
+pip install -r requirements.txt
+uvicorn main:app --reload
+```
+
+Set `PORTAL_REDIS_URL` if Redis is not running at `redis://localhost:6379/6`.
+
+## Frontend
+
+* Framework: Next.js + TailwindCSS
+* Location: `frontend/`
+
+### Key screens
+
+```
+frontend/
+├─ pages/
+│  ├─ index.js            # Marketing / landing
+│  ├─ login.js            # Auth & invite flow
+│  ├─ dashboard.js        # Executive overview
+│  ├─ nodes.js            # Node inventory & provisioning
+│  ├─ rewards.js          # Reward analytics
+│  └─ admin/
+│     ├─ audit.js         # Audit trail browser
+│     ├─ billing.js       # Billing summary
+│     ├─ organizations.js # Super-admin tenant management
+│     └─ users.js         # Team access & invites
+├─ components/
+│  ├─ layout/AdminShell.js
+│  ├─ navigation/SidebarNav.js
+│  ├─ cards/MetricCard.js
+│  ├─ charts/UptimeTrend.js
+│  ├─ tables/AuditTable.js
+│  ├─ tables/OrganizationTable.js
+│  ├─ NodeTable.js
+│  └─ RewardGraph.js
+└─ lib/api.js
+```
+
+### Running locally
+
+```bash
+cd frontend
+npm install
+npm run dev
+```
+
+The frontend targets `http://localhost:8000/api/v1` by default. Override with
+`NEXT_PUBLIC_API_BASE` when deploying. Both tiers share the same Redis instance
+for telemetry and session storage.
+
+## Architectural Notes
+
+* **Lifespan-managed services** – `NodeMonitor` and `RewardDistributor` start
+  and stop with FastAPI, ensuring clean shutdown of background tasks.
+* **Analytics service layer** – `services/analytics.py` aggregates plan
+  distribution, uptime averages, and billing information for dashboards.
+* **Storage layout** – Redis hashes and sets are namespaced per organisation
+  (`org:{id}:*`) to simplify tenant isolation and analytics fan-out.
+* **Frontend auth context** – `AuthProvider` persists bearer sessions, injects
+  tokens into API calls, and gates protected routes via `AdminShell`.
+ diff --git a/ambassadors/interchained-node-operator-portal/backend/__init__.py b/ambassadors/interchained-node-operator-portal/backend/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/ambassadors/interchained-node-operator-portal/backend/auth.py b/ambassadors/interchained-node-operator-portal/backend/auth.py new file mode 100644 index 000000000..45638e8f1 --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/backend/auth.py @@ -0,0 +1,316 @@ +"""Authentication utilities for the Node Operator portal.""" +from __future__ import annotations + +import json +from datetime import datetime, timedelta +from typing import Any, Optional + +from fastapi import Depends, HTTPException, status +from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer +from passlib.context import CryptContext + +from .config import get_settings +from .models import ( + AdminUserCreate, + InviteCreate, + InvitePublic, + OrganizationCreate, + ServicePlanTier, + SessionToken, + UserCreate, + UserPublic, + UserRole, +) +from .services.audit import record_audit_event +from .services.organizations import create_organization, get_organization +from .utils.ids import random_token +from .utils.redis_client import get_redis + +pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") +security = HTTPBearer(auto_error=False) + + +async def get_user(email: str) -> Optional[dict[str, str]]: + redis = await get_redis() + user_key = f"users:{email}" + user = await redis.hgetall(user_key) + return user or None + + +async def create_user(payload: UserCreate) -> UserPublic: + """Register a user using an invite or bootstrap the first super admin.""" + + redis = await get_redis() + existing = await redis.hgetall(f"users:{payload.email}") + if existing: + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="User already exists") + + invite_details = await _consume_invite(payload.invite_code) if payload.invite_code else None + total_users = 
int(await redis.get("meta:user_count") or 0) + + if invite_details: + organization_id = invite_details["organization_id"] + role = UserRole(invite_details["role"]) + elif total_users == 0: + org = await create_organization( + OrganizationCreate(name="Interchained Core", billing_email=payload.email, plan=ServicePlanTier.ENTERPRISE), + owner_email=payload.email, + ) + organization_id = org.id + role = UserRole.SUPER_ADMIN + else: + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Registration requires an invitation") + + created_at = datetime.utcnow() + await redis.hset( + f"users:{payload.email}", + mapping={ + "email": payload.email, + "password": pwd_context.hash(payload.password), + "full_name": payload.full_name, + "created_at": created_at.isoformat(), + "organization_id": organization_id, + "role": role.value, + "is_active": 1, + }, + ) + await redis.sadd(f"org:{organization_id}:members", payload.email) + await redis.incr("meta:user_count") + await record_audit_event( + actor_email=payload.email, + action="user.registered", + organization_id=organization_id, + metadata={"role": role.value}, + ) + return UserPublic( + email=payload.email, + full_name=payload.full_name, + organization_id=organization_id, + role=role, + created_at=created_at, + ) + + +async def create_admin_user(payload: AdminUserCreate, actor: UserPublic) -> UserPublic: + redis = await get_redis() + organization = await get_organization(payload.organization_id) + if not organization: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Organization not found") + if actor.role != UserRole.SUPER_ADMIN and payload.organization_id != actor.organization_id: + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Cannot manage another organization") + if actor.role != UserRole.SUPER_ADMIN and payload.role == UserRole.SUPER_ADMIN: + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Only super admins can grant super admin access") + if await 
redis.exists(f"users:{payload.email}"): + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="User already exists") + created_at = datetime.utcnow() + await redis.hset( + f"users:{payload.email}", + mapping={ + "email": payload.email, + "password": pwd_context.hash(payload.password), + "full_name": payload.full_name, + "created_at": created_at.isoformat(), + "organization_id": payload.organization_id, + "role": payload.role.value, + "is_active": 1, + }, + ) + await redis.sadd(f"org:{payload.organization_id}:members", payload.email) + await redis.incr("meta:user_count") + await record_audit_event( + actor_email=actor.email, + action="user.invited", + organization_id=payload.organization_id, + target=payload.email, + metadata={"role": payload.role.value}, + ) + return UserPublic( + email=payload.email, + full_name=payload.full_name, + organization_id=payload.organization_id, + role=payload.role, + created_at=created_at, + ) + + +async def list_org_users(organization_id: str) -> list[UserPublic]: + redis = await get_redis() + members = await redis.smembers(f"org:{organization_id}:members") + results: list[UserPublic] = [] + for email in members: + data = await redis.hgetall(f"users:{email}") + if not data or not int(data.get("is_active", 1)): + continue + results.append(_deserialize_user(data)) + return sorted(results, key=lambda u: u.created_at) + + +async def update_user_role(email: str, role: UserRole, actor: UserPublic) -> UserPublic: + redis = await get_redis() + user_data = await redis.hgetall(f"users:{email}") + if not user_data: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found") + if actor.role != UserRole.SUPER_ADMIN and user_data.get("organization_id") != actor.organization_id: + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Cannot manage another organization") + if actor.role != UserRole.SUPER_ADMIN and role == UserRole.SUPER_ADMIN: + raise 
HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Only super admins can grant super admin access") + await redis.hset(f"users:{email}", mapping={"role": role.value}) + await record_audit_event( + actor_email=actor.email, + action="user.role.updated", + organization_id=user_data.get("organization_id"), + target=email, + metadata={"role": role.value}, + ) + user_data["role"] = role.value + return _deserialize_user(user_data) + + +async def deactivate_user(email: str, actor: UserPublic) -> None: + redis = await get_redis() + user_key = f"users:{email}" + user_data = await redis.hgetall(user_key) + if not user_data: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found") + if actor.role != UserRole.SUPER_ADMIN and user_data.get("organization_id") != actor.organization_id: + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Cannot manage another organization") + await redis.hset(user_key, mapping={"is_active": 0}) + await redis.srem(f"org:{user_data['organization_id']}:members", email) + await record_audit_event( + actor_email=actor.email, + action="user.deactivated", + organization_id=user_data.get("organization_id"), + target=email, + ) + + +async def authenticate_user(email: str, password: str) -> Optional[UserPublic]: + user = await get_user(email) + if not user or not int(user.get("is_active", 1)): + return None + if not pwd_context.verify(password, user.get("password", "")): + return None + return _deserialize_user(user) + + +async def create_session_token(user: UserPublic) -> SessionToken: + redis = await get_redis() + settings = get_settings() + token = random_token() + payload = { + "email": user.email, + "role": user.role.value, + "organization_id": user.organization_id, + } + await redis.setex(f"sessions:{token}", settings.session_ttl_seconds, json.dumps(payload)) + return SessionToken(access_token=token, expires_in=settings.session_ttl_seconds, user=user) + + +async def resolve_token(credentials: 
Optional[HTTPAuthorizationCredentials] = Depends(security)) -> UserPublic: + if credentials is None or credentials.scheme.lower() != "bearer": + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Missing credentials") + redis = await get_redis() + raw = await redis.get(f"sessions:{credentials.credentials}") + if not raw: + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid token") + payload = json.loads(raw) + user = await get_user(payload["email"]) + if not user: + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Inactive user") + return _deserialize_user(user) + + +async def get_current_user(user: UserPublic = Depends(resolve_token)) -> UserPublic: + return user + + +async def create_invite(payload: InviteCreate, actor: UserPublic) -> InvitePublic: + redis = await get_redis() + organization = await get_organization(payload.organization_id) + if not organization: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Organization not found") + if actor.role != UserRole.SUPER_ADMIN and organization.id != actor.organization_id: + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Cannot invite to another organization") + if actor.role != UserRole.SUPER_ADMIN and payload.role == UserRole.SUPER_ADMIN: + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Only super admins can invite super admins") + code = random_token(20) + expires_at = datetime.utcnow() + timedelta(hours=payload.expires_in_hours) + await redis.hset( + f"invites:{code}", + mapping={ + "code": code, + "organization_id": payload.organization_id, + "role": payload.role.value, + "expires_at": expires_at.isoformat(), + "created_by": actor.email, + "note": payload.note or "", + }, + ) + await redis.sadd("invites:index", code) + await record_audit_event( + actor_email=actor.email, + action="invite.created", + organization_id=payload.organization_id, + metadata={"code": code, "role": 
payload.role.value}, + ) + return InvitePublic( + code=code, + organization_id=payload.organization_id, + role=payload.role, + expires_at=expires_at, + created_by=actor.email, + note=payload.note, + ) + + +async def list_invites(organization_id: str) -> list[InvitePublic]: + redis = await get_redis() + codes = await redis.smembers("invites:index") + invites: list[InvitePublic] = [] + for code in codes: + data = await redis.hgetall(f"invites:{code}") + if not data or data.get("organization_id") != organization_id: + continue + expires_at = datetime.fromisoformat(data["expires_at"]) + if expires_at < datetime.utcnow(): + await redis.delete(f"invites:{code}") + await redis.srem("invites:index", code) + continue + invites.append( + InvitePublic( + code=data["code"], + organization_id=data["organization_id"], + role=UserRole(data["role"]), + expires_at=expires_at, + created_by=data["created_by"], + note=data.get("note") or None, + ) + ) + return invites + + +async def _consume_invite(code: str | None) -> dict[str, Any] | None: + if not code: + return None + redis = await get_redis() + data = await redis.hgetall(f"invites:{code}") + if not data: + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid invitation code") + expires_at = datetime.fromisoformat(data["expires_at"]) + if expires_at < datetime.utcnow(): + await redis.delete(f"invites:{code}") + await redis.srem("invites:index", code) + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invitation expired") + await redis.delete(f"invites:{code}") + await redis.srem("invites:index", code) + return data + + +def _deserialize_user(data: dict[str, str]) -> UserPublic: + return UserPublic( + email=data["email"], + full_name=data.get("full_name", data["email"]), + organization_id=data.get("organization_id", ""), + role=UserRole(data.get("role", UserRole.OPERATOR.value)), + created_at=datetime.fromisoformat(data["created_at"]), + ) diff --git 
a/ambassadors/interchained-node-operator-portal/backend/config.py b/ambassadors/interchained-node-operator-portal/backend/config.py new file mode 100644 index 000000000..452c54a08 --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/backend/config.py @@ -0,0 +1,31 @@ +"""Application configuration via environment variables.""" +from __future__ import annotations + +from functools import lru_cache +from typing import List + +from pydantic import AnyUrl, Field +from pydantic_settings import BaseSettings, SettingsConfigDict + + +class Settings(BaseSettings): + """Strongly typed application settings loaded from the environment.""" + + app_name: str = "Interchained Operator Control Plane" + environment: str = Field(default="development", description="Deployment environment name") + redis_url: AnyUrl = Field(default="redis://localhost:6379/6", description="Redis connection URL") + session_ttl_seconds: int = Field(default=60 * 60 * 24 * 7, ge=3600) + monitor_interval_seconds: int = Field(default=60, ge=15) + reward_distribution_hour_utc: int = Field(default=0, ge=0, le=23) + audit_log_retention_days: int = Field(default=90, ge=7) + allowed_cors_origins: List[str] = Field(default_factory=lambda: ["*"]) + default_plan: str = Field(default="enterprise") + + model_config = SettingsConfigDict(env_prefix="PORTAL_", env_file=".env", extra="ignore") + + +@lru_cache(maxsize=1) +def get_settings() -> Settings: + """Return cached application settings.""" + + return Settings() diff --git a/ambassadors/interchained-node-operator-portal/backend/dependencies.py b/ambassadors/interchained-node-operator-portal/backend/dependencies.py new file mode 100644 index 000000000..535d7caa8 --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/backend/dependencies.py @@ -0,0 +1,34 @@ +"""FastAPI dependency helpers.""" +from __future__ import annotations + +from collections.abc import Callable + +from fastapi import Depends, HTTPException, status + +from .auth import 
get_current_user +from .models import UserPublic, UserRole + + +def require_roles(*roles: UserRole) -> Callable[[UserPublic], UserPublic]: + """FastAPI dependency enforcing that the current user has one of the roles.""" + + allowed_roles: set[UserRole] = set(roles) + + async def dependency(current_user: UserPublic = Depends(get_current_user)) -> UserPublic: + if current_user.role not in allowed_roles: + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Insufficient permissions") + return current_user + + return dependency + + +def require_super_admin() -> Callable[[UserPublic], UserPublic]: + """Shortcut dependency for super-admin protected endpoints.""" + + return require_roles(UserRole.SUPER_ADMIN) + + +def require_org_admin() -> Callable[[UserPublic], UserPublic]: + """Shortcut dependency for org-admin and super-admin roles.""" + + return require_roles(UserRole.SUPER_ADMIN, UserRole.ORG_ADMIN) diff --git a/ambassadors/interchained-node-operator-portal/backend/main.py b/ambassadors/interchained-node-operator-portal/backend/main.py new file mode 100644 index 000000000..e9b7ac918 --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/backend/main.py @@ -0,0 +1,54 @@ +"""FastAPI entry point for the Node Operator Rewards Portal backend.""" +from __future__ import annotations + +import asyncio +from contextlib import asynccontextmanager +from typing import AsyncIterator + +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware + +from .config import get_settings +from .node_monitor import NodeMonitor +from .rewards import RewardDistributor +from .routes import admin, analytics, audit, nodes, rewards, users +from .utils.redis_client import close_redis + + +@asynccontextmanager +async def lifespan(_: FastAPI) -> AsyncIterator[None]: + monitor = NodeMonitor() + rewards_job = RewardDistributor() + await asyncio.gather(monitor.start(), rewards_job.start()) + try: + yield + finally: + await asyncio.gather(monitor.stop(), 
rewards_job.stop()) + await close_redis() + + +settings = get_settings() + +app = FastAPI(title=settings.app_name, lifespan=lifespan) + +app.add_middleware( + CORSMiddleware, + allow_origins=settings.allowed_cors_origins, + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + +api_prefix = "/api/v1" + +app.include_router(users.router, prefix=api_prefix) +app.include_router(admin.router, prefix=api_prefix) +app.include_router(nodes.router, prefix=api_prefix) +app.include_router(rewards.router, prefix=api_prefix) +app.include_router(analytics.router, prefix=api_prefix) +app.include_router(audit.router, prefix=api_prefix) + + +@app.get("/") +async def root() -> dict[str, str]: + return {"status": "ok", "environment": settings.environment} diff --git a/ambassadors/interchained-node-operator-portal/backend/models.py b/ambassadors/interchained-node-operator-portal/backend/models.py new file mode 100644 index 000000000..de5f1f45a --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/backend/models.py @@ -0,0 +1,214 @@ +"""Pydantic models used across the FastAPI application.""" +from __future__ import annotations + +from datetime import datetime +from enum import Enum +from typing import Any, Optional + +from pydantic import BaseModel, EmailStr, Field, HttpUrl + + +class UserRole(str, Enum): + """Supported authorization roles.""" + + SUPER_ADMIN = "super_admin" + ORG_ADMIN = "org_admin" + OPERATOR = "operator" + AUDITOR = "auditor" + + +class ServicePlanTier(str, Enum): + """Service plan tiers for tenant organisations.""" + + LAUNCH = "launch" + GROWTH = "growth" + ENTERPRISE = "enterprise" + + +class FeatureSettings(BaseModel): + """Feature toggles that can be attached to an organisation.""" + + realtime_alerting: bool = True + automated_payouts: bool = False + ai_insights: bool = False + gamified_badges: bool = True + compliance_reporting: bool = True + unlimited_seats: bool = False + + +class OrganizationCreate(BaseModel): + name: str = 
Field(min_length=2, max_length=120) + billing_email: EmailStr + plan: ServicePlanTier = ServicePlanTier.ENTERPRISE + feature_overrides: Optional[FeatureSettings] = None + slug: Optional[str] = Field(default=None, description="Custom slug for vanity URLs") + + +class Organization(BaseModel): + id: str + name: str + slug: str + billing_email: EmailStr + plan: ServicePlanTier + features: FeatureSettings + created_at: datetime + updated_at: datetime + owner_email: EmailStr + is_active: bool = True + + +class OrganizationUpdate(BaseModel): + name: Optional[str] = None + billing_email: Optional[EmailStr] = None + plan: Optional[ServicePlanTier] = None + feature_overrides: Optional[FeatureSettings] = None + is_active: Optional[bool] = None + + +class UserCreate(BaseModel): + email: EmailStr + password: str = Field(min_length=12) + full_name: str = Field(min_length=2) + invite_code: Optional[str] = Field(default=None, description="Invitation code when registering") + + +class AdminUserCreate(BaseModel): + email: EmailStr + password: str = Field(min_length=12) + full_name: str + role: UserRole = UserRole.OPERATOR + organization_id: str + + +class UserLogin(BaseModel): + email: EmailStr + password: str + + +class UserPublic(BaseModel): + email: EmailStr + full_name: str + organization_id: str + role: UserRole + created_at: datetime + + +class SessionToken(BaseModel): + access_token: str + token_type: str = "bearer" + expires_in: int + user: UserPublic + + +class InviteCreate(BaseModel): + organization_id: str + role: UserRole = UserRole.OPERATOR + expires_in_hours: int = Field(default=72, ge=1, le=24 * 14) + note: Optional[str] = None + + +class InvitePublic(BaseModel): + code: str + organization_id: str + role: UserRole + expires_at: datetime + created_by: EmailStr + note: Optional[str] = None + + +class NodeRegistration(BaseModel): + name: str = Field(min_length=3, max_length=80) + p2p_address: str = Field(description="IP or hostname with port, e.g. 
node.example:18080") + rpc_url: HttpUrl + wallet_address: str + owner_email: Optional[EmailStr] = None + tags: list[str] = Field(default_factory=list) + + +class NodeUpdate(BaseModel): + name: Optional[str] = None + p2p_address: Optional[str] = None + rpc_url: Optional[HttpUrl] = None + wallet_address: Optional[str] = None + owner_email: Optional[EmailStr] = None + tags: Optional[list[str]] = None + is_flagged: Optional[bool] = None + + +class NodeStatus(BaseModel): + id: str + organization_id: str + name: str + p2p_address: str + rpc_url: HttpUrl + wallet_address: str + owner_email: Optional[EmailStr] = None + tags: list[str] = Field(default_factory=list) + last_seen: Optional[datetime] = None + uptime_score: float = 0.0 + total_checks: int = 0 + successful_checks: int = 0 + latency_ms: Optional[float] = None + block_height: Optional[int] = None + is_flagged: bool = False + p2p_online: bool = False + rpc_responding: bool = False + fully_online: bool = False + + +class RewardSummary(BaseModel): + date: datetime + rewards: dict[str, float] + pool_balance: float + + +class RewardHistoryItem(BaseModel): + date: datetime + amount: float + node_id: str + + +class RewardHistory(BaseModel): + organization_id: str + history: list[RewardHistoryItem] + + +class PoolBalance(BaseModel): + balance: float + + +class PoolTopUpRequest(BaseModel): + amount: float = Field(gt=0, description="Amount to add to the reward pool") + + +class AuditEvent(BaseModel): + id: str + actor_email: EmailStr + organization_id: Optional[str] + action: str + target: Optional[str] = None + metadata: dict[str, Any] = Field(default_factory=dict) + created_at: datetime + + +class MetricTimeseriesPoint(BaseModel): + timestamp: datetime + value: float + + +class AdminDashboardMetrics(BaseModel): + total_active_nodes: int + avg_uptime: float + flagged_nodes: int + mrr: float + plan_distribution: dict[str, int] + uptime_timeseries: list[MetricTimeseriesPoint] + + +class BillingSummary(BaseModel): + 
organization_id: str + plan: ServicePlanTier + monthly_cost: float + included_nodes: int + additional_node_price: float + current_month_usage: int diff --git a/ambassadors/interchained-node-operator-portal/backend/node_monitor.py b/ambassadors/interchained-node-operator-portal/backend/node_monitor.py new file mode 100644 index 000000000..ba995ddad --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/backend/node_monitor.py @@ -0,0 +1,81 @@ +"""Background task that periodically checks node health metrics.""" +from __future__ import annotations + +import asyncio +import json +from datetime import datetime, timedelta +from typing import Any + +from .config import get_settings +from .utils.bitcoin_rpc import check_node_health +from .utils.redis_client import get_redis + + +class NodeMonitor: + """Continuously check registered nodes and persist uptime metrics.""" + + def __init__(self, interval_seconds: int | None = None) -> None: + settings = get_settings() + self._interval = interval_seconds or settings.monitor_interval_seconds + self._task: asyncio.Task[Any] | None = None + self._stop_event = asyncio.Event() + + async def start(self) -> None: + if self._task is None: + self._stop_event.clear() + self._task = asyncio.create_task(self._run()) + + async def stop(self) -> None: + if self._task: + self._stop_event.set() + await self._task + self._task = None + + async def _run(self) -> None: + while not self._stop_event.is_set(): + await self._check_all_nodes() + try: + await asyncio.wait_for(self._stop_event.wait(), timeout=self._interval) + except asyncio.TimeoutError: + continue + + async def _check_all_nodes(self) -> None: + redis = await get_redis() + node_ids = await redis.smembers("node:index") + now = datetime.utcnow() + for node_id in node_ids: + node = await redis.hgetall(f"node:{node_id}") + if not node: + continue + health = await check_node_health(node.get("p2p_address", ""), node.get("rpc_url", "")) + stats_key = f"uptime:{node_id}" + 
total_checks = await redis.hincrby(stats_key, "total_checks", 1) + if health.is_online: + successful_checks = await redis.hincrby(stats_key, "successful_checks", 1) + else: + successful_checks = int(await redis.hget(stats_key, "successful_checks") or 0) + uptime_score = successful_checks / total_checks if total_checks else 0.0 + await redis.hset( + stats_key, + mapping={ + "total_checks": total_checks, + "successful_checks": successful_checks, + "uptime_score": uptime_score, + "last_seen": now.isoformat(), + "latency_ms": health.latency_ms, + "block_height": health.block_height or 0, + "rpc_responding": int(health.rpc_responding), + "p2p_online": int(health.p2p_online), + "fully_online": int(health.is_fully_online), + }, + ) + timeseries_entry = json.dumps( + { + "timestamp": now.isoformat(), + "node_id": node_id, + "uptime": uptime_score, + } + ) + await redis.zadd("metrics:uptime:global", {timeseries_entry: now.timestamp()}) + cutoff = (datetime.utcnow() - timedelta(days=14)).timestamp() + await redis.zremrangebyscore("metrics:uptime:global", 0, cutoff) diff --git a/ambassadors/interchained-node-operator-portal/backend/requirements.txt b/ambassadors/interchained-node-operator-portal/backend/requirements.txt new file mode 100644 index 000000000..796540bb1 --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/backend/requirements.txt @@ -0,0 +1,8 @@ +fastapi==0.110.0 +uvicorn[standard]==0.27.1 +redis==5.0.4 +httpx==0.27.0 +passlib[bcrypt]==1.7.4 +python-multipart==0.0.9 +pydantic-settings==2.2.1 +python-slugify==8.0.4 diff --git a/ambassadors/interchained-node-operator-portal/backend/rewards.py b/ambassadors/interchained-node-operator-portal/backend/rewards.py new file mode 100644 index 000000000..e6bbd1a9a --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/backend/rewards.py @@ -0,0 +1,134 @@ +"""Reward distribution utilities.""" +from __future__ import annotations + +import asyncio +from datetime import datetime, timedelta +from typing 
import Any + +from .config import get_settings +from .models import ServicePlanTier +from .utils.redis_client import get_redis + + +REWARD_PREFIX = "rewards:" +REWARD_HISTORY_PREFIX = "rewards:history:" +PLAN_MULTIPLIER = { + ServicePlanTier.LAUNCH: 1.0, + ServicePlanTier.GROWTH: 1.15, + ServicePlanTier.ENTERPRISE: 1.3, +} + +P2P_ONLY_REWARD_FACTOR = 0.6 + + +async def distribute_daily_rewards(date: datetime | None = None) -> dict[str, float]: + redis = await get_redis() + snapshot_date = (date or datetime.utcnow()).date() + date_key = f"{REWARD_PREFIX}{snapshot_date.isoformat()}" + + total_pool_raw = await redis.get("pool:balance") + total_pool = float(total_pool_raw or 0) + if total_pool <= 0: + return {} + + node_ids = await redis.smembers("node:index") + active_nodes: list[tuple[str, float, str]] = [] + for node_id in node_ids: + stats = await redis.hgetall(f"uptime:{node_id}") + score = float(stats.get("uptime_score", 0)) + if score < 0.9: + continue + node = await redis.hgetall(f"node:{node_id}") + if not node: + continue + rpc_responding = bool(int(stats.get("rpc_responding", "0") or 0)) + p2p_online = bool(int(stats.get("p2p_online", "0") or 0)) + if not p2p_online: + continue + org_id = node.get("organization_id") or "" + plan_value = await redis.hget(f"org:{org_id}", "plan") or ServicePlanTier.LAUNCH.value + try: + plan_enum = ServicePlanTier(plan_value) + except ValueError: + plan_enum = ServicePlanTier.LAUNCH + multiplier = PLAN_MULTIPLIER.get(plan_enum, 1.0) + interface_multiplier = 1.0 if rpc_responding else P2P_ONLY_REWARD_FACTOR + weight = score * multiplier * interface_multiplier + active_nodes.append((node_id, weight, org_id)) + + if not active_nodes: + return {} + + total_weight = sum(weight for _, weight, _ in active_nodes) + if total_weight == 0: + return {} + + rewards: dict[str, float] = {} + org_rewards: dict[str, float] = {} + for node_id, weight, org_id in active_nodes: + share = (weight / total_weight) * total_pool + rewards[node_id] = 
round(share, 8) + org_rewards[org_id] = org_rewards.get(org_id, 0.0) + share + + if rewards: + await redis.hset(date_key, mapping={node_id: str(amount) for node_id, amount in rewards.items()}) + await redis.set("pool:balance", 0) + for node_id, amount in rewards.items(): + node = await redis.hgetall(f"node:{node_id}") + org_id = node.get("organization_id", "") + ledger_key = f"node:{node_id}:rewards" + await redis.hincrbyfloat(ledger_key, "pending", amount) + await redis.hincrbyfloat(ledger_key, "lifetime", amount) + await redis.hset( + ledger_key, + mapping={ + "last_share": str(amount), + "last_rewarded_at": snapshot_date.isoformat(), + }, + ) + await redis.lpush( + f"{REWARD_HISTORY_PREFIX}{org_id}", + f"{snapshot_date.isoformat()}:{amount}:{node_id}", + ) + for org_id, total_amount in org_rewards.items(): + await redis.hincrbyfloat(f"org:{org_id}:rewards", "lifetime", total_amount) + await redis.hset(f"org:{org_id}:rewards", mapping={"last_payout": snapshot_date.isoformat()}) + return rewards + + +class RewardDistributor: + """Background job that triggers a distribution once every 24 hours.""" + + def __init__(self, run_at_hour: int | None = None, run_at_minute: int = 5) -> None: + settings = get_settings() + self._run_at_hour = run_at_hour if run_at_hour is not None else settings.reward_distribution_hour_utc + self._run_at_minute = run_at_minute + self._task: asyncio.Task[Any] | None = None + self._stop_event = asyncio.Event() + + async def start(self) -> None: + if self._task is None: + self._stop_event.clear() + self._task = asyncio.create_task(self._loop()) + + async def stop(self) -> None: + if self._task: + self._stop_event.set() + await self._task + self._task = None + + async def _loop(self) -> None: + while not self._stop_event.is_set(): + seconds_until_run = self._seconds_until_next_run() + try: + await asyncio.wait_for(self._stop_event.wait(), timeout=seconds_until_run) + continue + except asyncio.TimeoutError: + await distribute_daily_rewards() + + 
def _seconds_until_next_run(self) -> float: + now = datetime.utcnow() + next_run = now.replace(hour=self._run_at_hour, minute=self._run_at_minute, second=0, microsecond=0) + if next_run <= now: + next_run = next_run + timedelta(days=1) + return (next_run - now).total_seconds() diff --git a/ambassadors/interchained-node-operator-portal/backend/routes/__init__.py b/ambassadors/interchained-node-operator-portal/backend/routes/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/ambassadors/interchained-node-operator-portal/backend/routes/admin.py b/ambassadors/interchained-node-operator-portal/backend/routes/admin.py new file mode 100644 index 000000000..113e01737 --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/backend/routes/admin.py @@ -0,0 +1,91 @@ +"""Administrative API endpoints for the control panel.""" +from __future__ import annotations + +from fastapi import APIRouter, Depends, HTTPException, status + +from .. import auth +from ..dependencies import require_org_admin, require_super_admin +from ..models import ( + AdminUserCreate, + Organization, + OrganizationCreate, + OrganizationUpdate, + UserPublic, + UserRole, +) +from ..services import organizations +from ..services.audit import record_audit_event + + +router = APIRouter(prefix="/admin", tags=["admin"]) + + +@router.get("/organizations", response_model=list[Organization]) +async def list_organizations_route(_: UserPublic = Depends(require_super_admin())) -> list[Organization]: + return await organizations.list_organizations() + + +@router.post("/organizations", response_model=Organization, status_code=status.HTTP_201_CREATED) +async def create_organization_route( + payload: OrganizationCreate, + current_user: UserPublic = Depends(require_super_admin()), +) -> Organization: + org = await organizations.create_organization(payload, owner_email=current_user.email) + await record_audit_event(actor_email=current_user.email, action="organization.created", 
@router.patch("/organizations/{organization_id}", response_model=Organization)
async def update_organization_route(
    organization_id: str,
    payload: OrganizationUpdate,
    current_user: UserPublic = Depends(require_super_admin()),
) -> Organization:
    """Apply a partial organization update and audit which fields changed."""
    org = await organizations.update_organization(organization_id, payload)
    if not org:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Organization not found")
    # Record only the field names, never the values, in the audit trail.
    changed_fields = [field for field in payload.model_dump(exclude_none=True)]
    await record_audit_event(
        actor_email=current_user.email,
        action="organization.updated",
        organization_id=org.id,
        metadata={"fields": changed_fields},
    )
    return org


@router.post("/users", response_model=UserPublic, status_code=status.HTTP_201_CREATED)
async def create_user_route(
    payload: AdminUserCreate,
    current_user: UserPublic = Depends(require_org_admin()),
) -> UserPublic:
    """Provision a user; org admins may only create users in their own org."""
    is_super_admin = current_user.role == UserRole.SUPER_ADMIN
    if not is_super_admin and payload.organization_id != current_user.organization_id:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Cannot manage another organization")
    return await auth.create_admin_user(payload, current_user)


@router.get("/organizations/{organization_id}/users", response_model=list[UserPublic])
async def list_org_users_route(
    organization_id: str,
    current_user: UserPublic = Depends(require_org_admin()),
) -> list[UserPublic]:
    """List the members of an organization visible to the caller."""
    may_view = (
        current_user.role == UserRole.SUPER_ADMIN
        or current_user.organization_id == organization_id
    )
    if not may_view:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Cannot view another organization")
    return await auth.list_org_users(organization_id)


@router.post("/users/{email}/role", response_model=UserPublic)
async def update_user_role_route(
    email: str,
    role: UserRole,
    current_user: UserPublic = Depends(require_org_admin()),
) -> UserPublic:
    """Change a user's role via the auth service, passing the acting user."""
    updated = await auth.update_user_role(email, role, current_user)
    return updated
@router.post("/users/{email}/deactivate", status_code=status.HTTP_204_NO_CONTENT)
async def deactivate_user_route(
    email: str,
    current_user: UserPublic = Depends(require_org_admin()),
) -> None:
    """Deactivate the given user account via the auth service."""
    await auth.deactivate_user(email, current_user)


@router.get("/dashboard", response_model=AdminDashboardMetrics)
async def get_dashboard_metrics(_: UserPublic = Depends(require_super_admin())) -> AdminDashboardMetrics:
    """Return fleet-wide dashboard metrics (super admins only)."""
    metrics = await analytics.get_dashboard_metrics()
    return metrics


@router.get("/billing/{organization_id}", response_model=BillingSummary)
async def get_billing_summary(
    organization_id: str,
    current_user: UserPublic = Depends(require_org_admin()),
) -> BillingSummary:
    """Return one organization's billing summary; own org unless super admin."""
    may_access = (
        current_user.role == UserRole.SUPER_ADMIN
        or current_user.organization_id == organization_id
    )
    if not may_access:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Cannot access another organization")
    return await analytics.get_billing_summary(organization_id)
endpoints.""" +from __future__ import annotations + +from fastapi import APIRouter, Depends + +from ..dependencies import require_org_admin +from ..models import AuditEvent, UserPublic, UserRole +from ..services.audit import fetch_audit_events + + +router = APIRouter(prefix="/audit", tags=["audit"]) + + +@router.get("", response_model=list[AuditEvent]) +async def get_audit_trail( + limit: int = 100, + current_user: UserPublic = Depends(require_org_admin()), +) -> list[AuditEvent]: + if current_user.role == UserRole.SUPER_ADMIN: + return await fetch_audit_events(limit=limit) + return await fetch_audit_events(limit=limit, organization_id=current_user.organization_id) diff --git a/ambassadors/interchained-node-operator-portal/backend/routes/nodes.py b/ambassadors/interchained-node-operator-portal/backend/routes/nodes.py new file mode 100644 index 000000000..ad43bb83e --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/backend/routes/nodes.py @@ -0,0 +1,158 @@ +"""Node management endpoints.""" +from __future__ import annotations + +from datetime import datetime + +from fastapi import APIRouter, Depends, HTTPException, status + +from ..dependencies import require_org_admin, require_roles +from ..models import NodeRegistration, NodeStatus, NodeUpdate, UserPublic, UserRole +from ..services.audit import record_audit_event +from ..utils.ids import short_ulid +from ..utils.redis_client import get_redis + + +router = APIRouter(prefix="/nodes", tags=["nodes"]) + + +NodeWriter = require_roles(UserRole.SUPER_ADMIN, UserRole.ORG_ADMIN, UserRole.OPERATOR) + + +@router.post("", response_model=NodeStatus, status_code=status.HTTP_201_CREATED) +async def create_node( + payload: NodeRegistration, + current_user: UserPublic = Depends(NodeWriter), +) -> NodeStatus: + organization_id = current_user.organization_id + owner_email = payload.owner_email or current_user.email + if current_user.role in {UserRole.OPERATOR, UserRole.AUDITOR} and owner_email != current_user.email: + 
@router.post("/register", response_model=NodeStatus, status_code=status.HTTP_201_CREATED)
async def register_legacy_node(
    payload: NodeRegistration,
    current_user: UserPublic = Depends(NodeWriter),
) -> NodeStatus:
    """Legacy alias for ``POST /nodes``; simply delegates to ``create_node``."""
    return await create_node(payload, current_user)


@router.get("", response_model=list[NodeStatus])
async def list_nodes(current_user: UserPublic = Depends(require_org_admin())) -> list[NodeStatus]:
    """List every node visible to the caller (whole fleet for super admins)."""
    redis = await get_redis()
    index_key = (
        "node:index"
        if current_user.role == UserRole.SUPER_ADMIN
        else f"org:{current_user.organization_id}:nodes"
    )
    statuses: list[NodeStatus] = []
    for node_id in await redis.smembers(index_key):
        try:
            statuses.append(await _build_node_status(node_id))
        except HTTPException:
            # Index members can outlive their node hash; skip dangling ids.
            continue
    return statuses
@router.patch("/{node_id}", response_model=NodeStatus)
async def update_node(
    node_id: str,
    payload: NodeUpdate,
    current_user: UserPublic = Depends(NodeWriter),
) -> NodeStatus:
    """Partially update a node, tracking owner reassignment and flag changes."""
    redis = await get_redis()
    node_key = f"node:{node_id}"
    existing = await redis.hgetall(node_key)
    if not existing:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Node not found")
    same_org = existing.get("organization_id") == current_user.organization_id
    if current_user.role != UserRole.SUPER_ADMIN and not same_org:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Node does not belong to your organization")
    changes = payload.model_dump(exclude_none=True)
    if "tags" in changes:
        # Tags live in a single comma-separated hash field.
        changes["tags"] = ",".join(changes["tags"])
    if "is_flagged" in changes:
        changes["is_flagged"] = int(bool(changes["is_flagged"]))
    if changes:
        changes["updated_at"] = datetime.utcnow().isoformat()
        await redis.hset(node_key, mapping=changes)
        if "owner_email" in changes:
            # Keep the per-user ownership index in sync with the new owner.
            await redis.srem(f"user:{existing.get('owner_email')}:nodes", node_id)
            await redis.sadd(f"user:{changes['owner_email']}:nodes", node_id)
        if "is_flagged" in changes:
            await record_audit_event(
                actor_email=current_user.email,
                action="node.flagged" if changes["is_flagged"] else "node.unflagged",
                organization_id=existing.get("organization_id"),
                target=node_id,
            )
    return await _build_node_status(node_id)


async def _build_node_status(node_id: str) -> NodeStatus:
    """Merge a node's hash record with its uptime telemetry into a NodeStatus."""
    redis = await get_redis()
    record = await redis.hgetall(f"node:{node_id}")
    if not record:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Node not found")
    telemetry = await redis.hgetall(f"uptime:{node_id}")

    latency: float | None = None
    if telemetry.get("latency_ms"):
        latency = float(telemetry["latency_ms"])
        if latency < 0:
            # Negative readings are discarded -- presumably a "no measurement"
            # sentinel from the monitor; confirm against NodeMonitor.
            latency = None

    raw_tags = record.get("tags", "")
    block_height_raw = telemetry.get("block_height")
    return NodeStatus(
        id=node_id,
        organization_id=record.get("organization_id", ""),
        name=record.get("name", ""),
        p2p_address=record.get("p2p_address", ""),
        rpc_url=record.get("rpc_url", ""),
        wallet_address=record.get("wallet_address", ""),
        owner_email=record.get("owner_email") or None,
        tags=[tag for tag in raw_tags.split(",") if tag],
        last_seen=datetime.fromisoformat(telemetry["last_seen"]) if telemetry.get("last_seen") else None,
        uptime_score=float(telemetry.get("uptime_score", 0.0)),
        total_checks=int(telemetry.get("total_checks", 0)),
        successful_checks=int(telemetry.get("successful_checks", 0)),
        latency_ms=latency,
        block_height=int(block_height_raw) if block_height_raw else None,
        is_flagged=bool(int(record.get("is_flagged", 0))),
        p2p_online=bool(int(telemetry.get("p2p_online", "0") or 0)),
        rpc_responding=bool(int(telemetry.get("rpc_responding", "0") or 0)),
        fully_online=bool(int(telemetry.get("fully_online", "0") or 0)),
    )
import auth +from ..dependencies import require_org_admin, require_super_admin +from ..models import ( + PoolBalance, + PoolTopUpRequest, + RewardHistory, + RewardHistoryItem, + RewardSummary, + UserPublic, + UserRole, +) +from ..rewards import distribute_daily_rewards +from ..services.audit import record_audit_event +from ..utils.redis_client import get_redis + + +def _as_float(value: str | None) -> float: + try: + return float(value) if value is not None else 0.0 + except (TypeError, ValueError): + return 0.0 + +router = APIRouter(prefix="/rewards", tags=["rewards"]) + + +@router.post("/run", response_model=RewardSummary) +async def trigger_rewards(_: UserPublic = Depends(require_super_admin())) -> RewardSummary: + rewards = await distribute_daily_rewards() + snapshot = datetime.utcnow() + redis = await get_redis() + pool_balance = float(await redis.get("pool:balance") or 0) + return RewardSummary(date=snapshot, rewards=rewards, pool_balance=pool_balance) + + +@router.post("/pool/top-up", response_model=PoolBalance) +async def top_up_reward_pool( + payload: PoolTopUpRequest, + current_user: UserPublic = Depends(require_super_admin()), +) -> PoolBalance: + redis = await get_redis() + new_balance = await redis.incrbyfloat("pool:balance", payload.amount) + await record_audit_event( + actor_email=current_user.email, + action="rewards.pool.top_up", + metadata={"amount": f"{payload.amount:.8f}", "balance": f"{new_balance:.8f}"}, + ) + return PoolBalance(balance=float(new_balance)) + + +@router.get("/today", response_model=RewardSummary) +async def rewards_today(current_user: UserPublic = Depends(auth.get_current_user)) -> RewardSummary: + today = date.today().isoformat() + redis = await get_redis() + rewards = await redis.hgetall(f"rewards:{today}") + rewards_float: dict[str, float] = {} + for node_id, amount in rewards.items(): + if current_user.role != UserRole.SUPER_ADMIN: + node = await redis.hgetall(f"node:{node_id}") + if node.get("organization_id") != 
current_user.organization_id:
+                continue
+        rewards_float[node_id] = float(amount)
+    pool_balance = float(await redis.get("pool:balance") or 0)
+    return RewardSummary(date=datetime.utcnow(), rewards=rewards_float, pool_balance=pool_balance)
+
+
+@router.get("/history", response_model=RewardHistory)
+async def reward_history(current_user: UserPublic = Depends(require_org_admin())) -> RewardHistory:
+    redis = await get_redis()
+    entries = await redis.lrange(f"rewards:history:{current_user.organization_id}", 0, 50)
+    history: list[RewardHistoryItem] = []
+    for entry in entries:
+        try:
+            date_str, amount_str, node_id = entry.split(":", 2)
+            history.append(
+                RewardHistoryItem(date=datetime.fromisoformat(date_str), amount=float(amount_str), node_id=node_id)
+            )
+        except ValueError:
+            continue
+    return RewardHistory(organization_id=current_user.organization_id, history=list(reversed(history)))
+
+
+@router.get("/export", response_class=StreamingResponse)
+async def export_rewards_csv(
+    report_date: date | None = Query(default=None, alias="date"),
+    organization_id: str | None = Query(default=None, alias="organizationId"),
+    current_user: UserPublic = Depends(require_org_admin()),
+) -> StreamingResponse:
+    if organization_id and current_user.role != UserRole.SUPER_ADMIN:
+        raise HTTPException(status_code=403, detail="Cannot export other organizations")
+
+    target_date = report_date or date.today()
+    redis = await get_redis()
+    rewards_key = f"rewards:{target_date.isoformat()}"
+    reward_map = await redis.hgetall(rewards_key)
+
+    node_ids: set[str] = set(reward_map.keys())
+    if organization_id:
+        node_ids.update(await redis.smembers(f"org:{organization_id}:nodes"))
+    elif current_user.role == UserRole.SUPER_ADMIN:
+        node_ids.update(await redis.smembers("node:index"))
+    else:
+        organization_id = current_user.organization_id
+        node_ids.update(await redis.smembers(f"org:{organization_id}:nodes"))
+
+    rows: list[dict[str, str]] = []
+    org_cache: dict[str, dict[str, str]] = {}
+
+    for node_id in sorted(node_ids):
+        node = await redis.hgetall(f"node:{node_id}")
+        if not node:
+            continue
+        org_id = node.get("organization_id") or ""
+        if current_user.role != UserRole.SUPER_ADMIN and org_id != current_user.organization_id:
+            continue
+        if organization_id and org_id != organization_id:
+            continue
+
+        ledger = await redis.hgetall(f"node:{node_id}:rewards")
+        org_data = org_cache.get(org_id)
+        if org_data is None:
+            org_data = await redis.hgetall(f"org:{org_id}") if org_id else {}
+            org_cache[org_id] = org_data
+
+        rows.append(
+            {
+                "organization_id": org_id,
+                "organization_name": org_data.get("name", ""),
+                "node_id": node_id,
+                "node_name": node.get("name", ""),
+                "wallet_address": node.get("wallet_address", ""),
+                "owner_email": node.get("owner_email", ""),
+                "today_share": f"{_as_float(reward_map.get(node_id)):.8f}",
+                "pending_rewards": f"{_as_float(ledger.get('pending')):.8f}",
+                "lifetime_rewards": f"{_as_float(ledger.get('lifetime')):.8f}",
+                "last_rewarded_at": ledger.get("last_rewarded_at", ""),
+            }
+        )
+
+    output = StringIO()
+    fieldnames = [
+        "organization_id",
+        "organization_name",
+        "node_id",
+        "node_name",
+        "wallet_address",
+        "owner_email",
+        "today_share",
+        "pending_rewards",
+        "lifetime_rewards",
+        "last_rewarded_at",
+    ]
+    writer = csv.DictWriter(output, fieldnames=fieldnames)
+    writer.writeheader()
+    writer.writerows(rows)
+
+    filename = f"reward-export-{target_date.isoformat()}"
+    if organization_id:
+        filename += f"-{organization_id}"
+    output.seek(0)
+    csv_bytes = output.getvalue().encode("utf-8")
+    headers = {
+        "Content-Disposition": f'attachment; filename="{filename}.csv"',
+    }
+    return StreamingResponse(iter([csv_bytes]), media_type="text/csv", headers=headers)
diff --git a/ambassadors/interchained-node-operator-portal/backend/routes/users.py b/ambassadors/interchained-node-operator-portal/backend/routes/users.py
new file mode 100644
index 000000000..9d97450a1
--- /dev/null
+++ 
b/ambassadors/interchained-node-operator-portal/backend/routes/users.py @@ -0,0 +1,41 @@ +"""User authentication routes.""" +from __future__ import annotations + +from fastapi import APIRouter, Depends, HTTPException, status + +from .. import auth +from ..dependencies import require_org_admin +from ..models import InviteCreate, InvitePublic, SessionToken, UserCreate, UserLogin, UserPublic + +router = APIRouter(prefix="/users", tags=["users"]) + + +@router.post("/register", response_model=UserPublic, status_code=status.HTTP_201_CREATED) +async def register_user(payload: UserCreate) -> UserPublic: + return await auth.create_user(payload) + + +@router.post("/login", response_model=SessionToken) +async def login_user(payload: UserLogin) -> SessionToken: + user = await auth.authenticate_user(payload.email, payload.password) + if not user: + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid credentials") + return await auth.create_session_token(user) + + +@router.get("/me", response_model=UserPublic) +async def get_me(current_user: UserPublic = Depends(auth.get_current_user)) -> UserPublic: + return current_user + + +@router.post("/invites", response_model=InvitePublic) +async def create_invite( + payload: InviteCreate, + current_user: UserPublic = Depends(require_org_admin()), +) -> InvitePublic: + return await auth.create_invite(payload, current_user) + + +@router.get("/invites", response_model=list[InvitePublic]) +async def list_invites(current_user: UserPublic = Depends(require_org_admin())) -> list[InvitePublic]: + return await auth.list_invites(current_user.organization_id) diff --git a/ambassadors/interchained-node-operator-portal/backend/services/__init__.py b/ambassadors/interchained-node-operator-portal/backend/services/__init__.py new file mode 100644 index 000000000..b9182a1a0 --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/backend/services/__init__.py @@ -0,0 +1 @@ +"""Service layer modules for the control plane 
backend.""" diff --git a/ambassadors/interchained-node-operator-portal/backend/services/analytics.py b/ambassadors/interchained-node-operator-portal/backend/services/analytics.py new file mode 100644 index 000000000..2bb069a20 --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/backend/services/analytics.py @@ -0,0 +1,112 @@ +"""Analytics services supporting the admin dashboard.""" +from __future__ import annotations + +import json +from collections import defaultdict +from datetime import date, datetime, timedelta + +from ..models import AdminDashboardMetrics, BillingSummary, MetricTimeseriesPoint, ServicePlanTier +from ..utils.redis_client import get_redis + + +PLAN_PRICING = { + ServicePlanTier.LAUNCH: {"price": 99.0, "included": 5, "overage": 20.0}, + ServicePlanTier.GROWTH: {"price": 299.0, "included": 20, "overage": 15.0}, + ServicePlanTier.ENTERPRISE: {"price": 799.0, "included": 100, "overage": 10.0}, +} + + +async def get_dashboard_metrics() -> AdminDashboardMetrics: + redis = await get_redis() + node_ids = await redis.smembers("node:index") + total_active_nodes = 0 + uptime_scores: list[float] = [] + flagged_nodes = 0 + + for node_id in node_ids: + node = await redis.hgetall(f"node:{node_id}") + if not node: + continue + stats = await redis.hgetall(f"uptime:{node_id}") + score = float(stats.get("uptime_score", 0.0)) if stats else 0.0 + if score >= 0.9: + total_active_nodes += 1 + uptime_scores.append(score) + if int(node.get("is_flagged", 0)): + flagged_nodes += 1 + + avg_uptime = sum(uptime_scores) / len(uptime_scores) if uptime_scores else 0.0 + + plan_distribution: dict[str, int] = {} + org_ids = await redis.smembers("org:index") + for org_id in org_ids: + raw_plan = await redis.hget(f"org:{org_id}", "plan") or ServicePlanTier.LAUNCH.value + try: + plan_enum = ServicePlanTier(raw_plan) + except ValueError: + plan_enum = ServicePlanTier.LAUNCH + plan_distribution[plan_enum.value] = plan_distribution.get(plan_enum.value, 0) + 1 + + mrr = 
0.0 + for plan, count in plan_distribution.items(): + try: + plan_enum = ServicePlanTier(plan) + except ValueError: + continue + mrr += PLAN_PRICING.get(plan_enum, {"price": 0})["price"] * count + + now = datetime.utcnow() + window_start = now - timedelta(days=6) + raw_entries = await redis.zrangebyscore("metrics:uptime:global", window_start.timestamp(), "+inf") + buckets: dict[date, list[float]] = defaultdict(list) + for entry in raw_entries: + try: + payload = json.loads(entry) + timestamp = datetime.fromisoformat(payload["timestamp"]) + uptime = float(payload.get("uptime", 0.0)) + except (KeyError, TypeError, ValueError, json.JSONDecodeError): + continue + buckets[timestamp.date()].append(uptime) + + points: list[MetricTimeseriesPoint] = [] + for offset in range(6, -1, -1): + day = (now - timedelta(days=offset)).date() + samples = buckets.get(day, []) + average = sum(samples) / len(samples) if samples else 0.0 + points.append( + MetricTimeseriesPoint( + timestamp=datetime.combine(day, datetime.min.time()), + value=round(average * 100, 2), + ) + ) + + return AdminDashboardMetrics( + total_active_nodes=total_active_nodes, + avg_uptime=round(avg_uptime, 4), + flagged_nodes=flagged_nodes, + mrr=float(mrr), + plan_distribution=plan_distribution, + uptime_timeseries=points, + ) + + +async def get_billing_summary(organization_id: str) -> BillingSummary: + redis = await get_redis() + plan_value = await redis.hget(f"org:{organization_id}", "plan") or ServicePlanTier.LAUNCH.value + try: + plan = ServicePlanTier(plan_value) + except ValueError: + plan = ServicePlanTier.LAUNCH + pricing = PLAN_PRICING[plan] + current_usage = await redis.scard(f"org:{organization_id}:nodes") + included = pricing["included"] + overage_nodes = max(current_usage - included, 0) + monthly_cost = pricing["price"] + overage_nodes * pricing["overage"] + return BillingSummary( + organization_id=organization_id, + plan=plan, + monthly_cost=monthly_cost, + included_nodes=included, + 
additional_node_price=pricing["overage"], + current_month_usage=current_usage, + ) diff --git a/ambassadors/interchained-node-operator-portal/backend/services/audit.py b/ambassadors/interchained-node-operator-portal/backend/services/audit.py new file mode 100644 index 000000000..8cfb52bb7 --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/backend/services/audit.py @@ -0,0 +1,79 @@ +"""Audit logging services.""" +from __future__ import annotations + +import json +from datetime import datetime, timedelta + +from ..config import get_settings +from ..models import AuditEvent +from ..utils.ids import short_ulid +from ..utils.redis_client import get_redis + + +async def record_audit_event( + *, + actor_email: str, + action: str, + organization_id: str | None = None, + target: str | None = None, + metadata: dict[str, str] | None = None, +) -> AuditEvent: + """Persist an immutable audit event.""" + + redis = await get_redis() + now = datetime.utcnow() + event_id = short_ulid("audit") + event = AuditEvent( + id=event_id, + actor_email=actor_email, + organization_id=organization_id, + action=action, + target=target, + metadata=metadata or {}, + created_at=now, + ) + await redis.hset( + f"audit:{event_id}", + mapping={ + "id": event.id, + "actor_email": event.actor_email, + "organization_id": event.organization_id or "", + "action": event.action, + "target": event.target or "", + "metadata": json.dumps(event.metadata), + "created_at": event.created_at.isoformat(), + }, + ) + await redis.zadd("audit:index", {event_id: now.timestamp()}) + await _enforce_retention() + return event + + +async def fetch_audit_events(*, limit: int = 100, organization_id: str | None = None) -> list[AuditEvent]: + redis = await get_redis() + event_ids = await redis.zrevrange("audit:index", 0, limit - 1) + events: list[AuditEvent] = [] + for event_id in event_ids: + data = await redis.hgetall(f"audit:{event_id}") + if not data: + continue + event = AuditEvent( + id=data["id"], + 
actor_email=data["actor_email"], + organization_id=data["organization_id"] or None, + action=data["action"], + target=data["target"] or None, + metadata=json.loads(data.get("metadata", "{}")), + created_at=datetime.fromisoformat(data["created_at"]), + ) + if organization_id and event.organization_id != organization_id: + continue + events.append(event) + return events + + +async def _enforce_retention() -> None: + settings = get_settings() + redis = await get_redis() + cutoff = datetime.utcnow() - timedelta(days=settings.audit_log_retention_days) + await redis.zremrangebyscore("audit:index", 0, cutoff.timestamp()) diff --git a/ambassadors/interchained-node-operator-portal/backend/services/organizations.py b/ambassadors/interchained-node-operator-portal/backend/services/organizations.py new file mode 100644 index 000000000..497c07428 --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/backend/services/organizations.py @@ -0,0 +1,150 @@ +"""Organisation management services.""" +from __future__ import annotations + +from datetime import datetime + +from slugify import slugify + +from ..models import FeatureSettings, Organization, OrganizationCreate, OrganizationUpdate, ServicePlanTier +from ..utils.ids import short_ulid +from ..utils.redis_client import get_redis + + +def _plan_defaults(plan: ServicePlanTier) -> FeatureSettings: + if plan == ServicePlanTier.LAUNCH: + return FeatureSettings( + realtime_alerting=False, + gamified_badges=True, + compliance_reporting=False, + ) + if plan == ServicePlanTier.GROWTH: + return FeatureSettings( + realtime_alerting=True, + gamified_badges=True, + automated_payouts=False, + ai_insights=False, + ) + return FeatureSettings( + realtime_alerting=True, + automated_payouts=True, + ai_insights=True, + gamified_badges=True, + compliance_reporting=True, + unlimited_seats=True, + ) + + +async def create_organization(payload: OrganizationCreate, owner_email: str) -> Organization: + redis = await get_redis() + org_id = 
short_ulid("org") + now = datetime.utcnow() + slug = payload.slug or slugify(payload.name) + features = payload.feature_overrides or _plan_defaults(payload.plan) + org_key = f"org:{org_id}" + await redis.hset( + org_key, + mapping={ + "id": org_id, + "name": payload.name, + "slug": slug, + "billing_email": payload.billing_email, + "plan": payload.plan.value, + "features": features.model_dump_json(), + "created_at": now.isoformat(), + "updated_at": now.isoformat(), + "owner_email": owner_email, + "is_active": 1, + }, + ) + await redis.hset("org:slugs", slug, org_id) + await redis.sadd("org:index", org_id) + return Organization( + id=org_id, + name=payload.name, + slug=slug, + billing_email=payload.billing_email, + plan=payload.plan, + features=features, + created_at=now, + updated_at=now, + owner_email=owner_email, + is_active=True, + ) + + +async def get_organization(org_id: str) -> Organization | None: + redis = await get_redis() + data = await redis.hgetall(f"org:{org_id}") + if not data: + return None + return _deserialize_org(data) + + +async def get_organization_by_slug(slug: str) -> Organization | None: + redis = await get_redis() + org_id = await redis.hget("org:slugs", slug) + if not org_id: + return None + return await get_organization(org_id) + + +async def update_organization(org_id: str, payload: OrganizationUpdate) -> Organization | None: + redis = await get_redis() + org_key = f"org:{org_id}" + data = await redis.hgetall(org_key) + if not data: + return None + updates: dict[str, str | int] = {} + if payload.name: + updates["name"] = payload.name + new_slug = slugify(payload.name) + current_slug = data.get("slug") + if current_slug and new_slug != current_slug: + await redis.hdel("org:slugs", current_slug) + await redis.hset("org:slugs", new_slug, org_id) + updates["slug"] = new_slug + if payload.billing_email: + updates["billing_email"] = payload.billing_email + if payload.plan: + updates["plan"] = payload.plan.value + if payload.feature_overrides: + 
updates["features"] = payload.feature_overrides.model_dump_json() + if payload.is_active is not None: + updates["is_active"] = 1 if payload.is_active else 0 + if updates: + updates["updated_at"] = datetime.utcnow().isoformat() + await redis.hset(org_key, mapping=updates) + data.update({k: str(v) for k, v in updates.items()}) + return _deserialize_org(data) + + +async def list_organizations() -> list[Organization]: + redis = await get_redis() + org_ids = await redis.smembers("org:index") + results: list[Organization] = [] + for org_id in org_ids: + data = await redis.hgetall(f"org:{org_id}") + if data: + org = _deserialize_org(data) + results.append(org) + return sorted(results, key=lambda org: org.created_at) + + +async def count_organizations() -> int: + redis = await get_redis() + return await redis.scard("org:index") + + +def _deserialize_org(data: dict[str, str]) -> Organization: + return Organization( + id=data["id"], + name=data["name"], + slug=data["slug"], + billing_email=data["billing_email"], + plan=ServicePlanTier(data.get("plan", ServicePlanTier.LAUNCH.value)), + features=FeatureSettings.model_validate_json(data.get("features", FeatureSettings().model_dump_json())), + created_at=datetime.fromisoformat(data["created_at"]), + updated_at=datetime.fromisoformat(data["updated_at"]), + owner_email=data.get("owner_email", ""), + is_active=bool(int(data.get("is_active", "1"))), + ) diff --git a/ambassadors/interchained-node-operator-portal/backend/utils/__init__.py b/ambassadors/interchained-node-operator-portal/backend/utils/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/ambassadors/interchained-node-operator-portal/backend/utils/bitcoin_rpc.py b/ambassadors/interchained-node-operator-portal/backend/utils/bitcoin_rpc.py new file mode 100644 index 000000000..8a7b1c694 --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/backend/utils/bitcoin_rpc.py @@ -0,0 +1,87 @@ +"""Helpers for performing lightweight node health 
checks.""" +from __future__ import annotations + +import asyncio +import json +import time +from dataclasses import dataclass +from typing import Optional +from urllib.parse import urlparse + +import httpx + + +@dataclass +class NodeHealth: + is_online: bool + latency_ms: float + block_height: Optional[int] + rpc_responding: bool + p2p_online: bool = False + is_fully_online: bool = False + + +async def _check_tcp_connectivity(host: str, port: int, timeout: float = 2.0) -> float: + start = time.perf_counter() + try: + reader, writer = await asyncio.wait_for( + asyncio.open_connection(host, port), timeout=timeout + ) + except (OSError, asyncio.TimeoutError): + return -1.0 + else: + writer.close() + await writer.wait_closed() + return (time.perf_counter() - start) * 1000 + + +async def _fetch_rpc_height(rpc_url: str, timeout: float = 2.0) -> tuple[bool, Optional[int]]: + try: + async with httpx.AsyncClient(timeout=timeout) as client: + response = await client.post( + rpc_url, + json={"jsonrpc": "1.0", "id": "health", "method": "getblockcount", "params": []}, + ) + response.raise_for_status() + payload = response.json() + except (httpx.HTTPError, json.JSONDecodeError): + return False, None + + height = None + if isinstance(payload, dict): + height = payload.get("result") + if isinstance(height, str) and height.isdigit(): + height = int(height) + return True, height if isinstance(height, int) else None + + +async def check_node_health(p2p_address: str, rpc_url: str) -> NodeHealth: + """Check whether a node responds over P2P and RPC interfaces.""" + + latency_ms = -1.0 + rpc_ok = False + block_height: Optional[int] = None + + if ":" in p2p_address: + host, port_str = p2p_address.rsplit(":", 1) + try: + port = int(port_str) + except ValueError: + port = 0 + if host and port: + latency_ms = await _check_tcp_connectivity(host, port) + + parsed = urlparse(rpc_url) + if parsed.scheme and parsed.netloc: + rpc_ok, block_height = await _fetch_rpc_height(rpc_url) + + p2p_ok = 
latency_ms >= 0 + is_online = p2p_ok + return NodeHealth( + is_online=is_online, + latency_ms=latency_ms if latency_ms >= 0 else -1, + block_height=block_height, + rpc_responding=rpc_ok, + p2p_online=p2p_ok, + is_fully_online=p2p_ok and rpc_ok, + ) diff --git a/ambassadors/interchained-node-operator-portal/backend/utils/ids.py b/ambassadors/interchained-node-operator-portal/backend/utils/ids.py new file mode 100644 index 000000000..764c34a0e --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/backend/utils/ids.py @@ -0,0 +1,20 @@ +"""Helpers for generating human friendly identifiers.""" +from __future__ import annotations + +import secrets +import string +from uuid import uuid4 + + +def short_ulid(prefix: str) -> str: + """Return a short identifier prefixed with the provided namespace.""" + + token = uuid4().hex[:12] + return f"{prefix}_{token}" + + +def random_token(length: int = 32) -> str: + """Generate a secure random token suitable for API keys or invites.""" + + alphabet = string.ascii_letters + string.digits + return "".join(secrets.choice(alphabet) for _ in range(length)) diff --git a/ambassadors/interchained-node-operator-portal/backend/utils/redis_client.py b/ambassadors/interchained-node-operator-portal/backend/utils/redis_client.py new file mode 100644 index 000000000..08c97e632 --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/backend/utils/redis_client.py @@ -0,0 +1,44 @@ +"""Utility helpers for working with Redis connections.""" +from __future__ import annotations + +from functools import lru_cache +from typing import AsyncIterator + +from redis.asyncio import Redis + +from ..config import get_settings + + +@lru_cache +def _build_redis_client() -> Redis: + """Instantiate a Redis client using the configured connection URL.""" + + settings = get_settings() + return Redis.from_url(str(settings.redis_url), decode_responses=True) + + +async def get_redis() -> Redis: + """Return a cached Redis client instance.""" + + return 
_build_redis_client()


async def close_redis() -> None:
    """Close the cached Redis connection pool, if one has been instantiated."""

    # Drop the memoized client after closing so a later get_redis() builds a
    # fresh connection instead of returning this closed instance.
    await _build_redis_client().close()
    _build_redis_client.cache_clear()


async def iter_hash_keys(prefix: str) -> AsyncIterator[str]:
    """Iterate over keys that match the provided hash prefix.

    Parameters
    ----------
    prefix:
        A glob-compatible prefix, e.g. ``"node:*"``.
    """

    client = await get_redis()
    async for key in client.scan_iter(match=prefix):
        yield key
diff --git a/ambassadors/interchained-node-operator-portal/frontend/.eslintrc.json b/ambassadors/interchained-node-operator-portal/frontend/.eslintrc.json
new file mode 100644
index 000000000..97a2bb84e
--- /dev/null
+++ b/ambassadors/interchained-node-operator-portal/frontend/.eslintrc.json
@@ -0,0 +1,3 @@
+{
+  "extends": ["next", "next/core-web-vitals"]
+}
diff --git a/ambassadors/interchained-node-operator-portal/frontend/components/GlassContainer.js b/ambassadors/interchained-node-operator-portal/frontend/components/GlassContainer.js
new file mode 100644
index 000000000..7d898831a
--- /dev/null
+++ b/ambassadors/interchained-node-operator-portal/frontend/components/GlassContainer.js
@@ -0,0 +1,7 @@
+export default function GlassContainer({ children, className = '' }) {
+  return (
+
+ {children} +
+ ); +} diff --git a/ambassadors/interchained-node-operator-portal/frontend/components/NeonCard.js b/ambassadors/interchained-node-operator-portal/frontend/components/NeonCard.js new file mode 100644 index 000000000..074fb673f --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/frontend/components/NeonCard.js @@ -0,0 +1,9 @@ +export default function NeonCard({ title, value, footer }) { + return ( +
+

{title}

+

{value}

+ {footer &&

{footer}

} +
+ ); +} diff --git a/ambassadors/interchained-node-operator-portal/frontend/components/NodeTable.js b/ambassadors/interchained-node-operator-portal/frontend/components/NodeTable.js new file mode 100644 index 000000000..d50f6a541 --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/frontend/components/NodeTable.js @@ -0,0 +1,65 @@ +import StatusBadge from './StatusBadge'; + +export default function NodeTable({ nodes }) { + if (!nodes?.length) { + return

No nodes registered yet.

; + } + + return ( +
+ + + + + + + + + + + + + + {nodes.map((node) => { + const uptime = (node.uptime_score || 0) * 100; + const status = (() => { + if (node.is_flagged) return 'Flagged'; + if (node.p2p_online && !node.rpc_responding) return 'Seed Online'; + if (node.p2p_online && node.rpc_responding) return 'Online'; + if (uptime > 70) return 'Degraded'; + if (uptime > 0) return 'Degraded'; + return 'Offline'; + })(); + const interfaceLabel = node.p2p_online + ? node.rpc_responding + ? 'P2P + RPC' + : 'P2P only' + : 'No signal'; + return ( + + + + + + + + + + ); + })} + +
NodeRPC URLLatencyBlock HeightUptimeTagsStatus
+
{node.name}
+
{node.p2p_address}
+
{node.owner_email || 'Unassigned'}
+
{node.rpc_url}{node.latency_ms ? `${node.latency_ms.toFixed(0)} ms` : '–'}{node.block_height || '–'}{uptime.toFixed(1)}% +
+ {node.tags?.length ? node.tags.map((tag) => #{tag}) : } +
+
+ +
{interfaceLabel}
+
+
+ ); +} diff --git a/ambassadors/interchained-node-operator-portal/frontend/components/RewardGraph.js b/ambassadors/interchained-node-operator-portal/frontend/components/RewardGraph.js new file mode 100644 index 000000000..86f71de38 --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/frontend/components/RewardGraph.js @@ -0,0 +1,31 @@ +import { Area, AreaChart, CartesianGrid, ResponsiveContainer, Tooltip, XAxis, YAxis } from 'recharts'; + +export default function RewardGraph({ data }) { + if (!data?.length) { + return

No rewards distributed yet.

; + } + + return ( +
+ + + + + + + + + + + `${value.toFixed(2)} ITC`} /> + [`${Number(value).toFixed(4)} ITC`, 'Reward Pool']} + /> + + + +
+ ); +} diff --git a/ambassadors/interchained-node-operator-portal/frontend/components/StatusBadge.js b/ambassadors/interchained-node-operator-portal/frontend/components/StatusBadge.js new file mode 100644 index 000000000..ee48e4c79 --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/frontend/components/StatusBadge.js @@ -0,0 +1,17 @@ +const colors = { + online: 'bg-emerald-500/20 text-emerald-300 border-emerald-500/50', + 'seed online': 'bg-cyan-500/20 text-cyan-200 border-cyan-500/50', + offline: 'bg-rose-500/20 text-rose-300 border-rose-500/50', + degraded: 'bg-amber-500/20 text-amber-300 border-amber-500/50', + flagged: 'bg-fuchsia-500/20 text-fuchsia-200 border-fuchsia-400/60', +}; + +export default function StatusBadge({ status }) { + const key = status?.toLowerCase(); + const style = colors[key] || colors.degraded; + return ( + + {status} + + ); +} diff --git a/ambassadors/interchained-node-operator-portal/frontend/components/cards/MetricCard.js b/ambassadors/interchained-node-operator-portal/frontend/components/cards/MetricCard.js new file mode 100644 index 000000000..66857d80a --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/frontend/components/cards/MetricCard.js @@ -0,0 +1,18 @@ +export default function MetricCard({ label, value, helper, tone = 'emerald' }) { + const tones = { + emerald: 'from-emerald-500/30 to-cyan-500/10 text-emerald-100', + amber: 'from-amber-500/30 to-rose-500/10 text-amber-100', + slate: 'from-slate-500/40 to-slate-700/10 text-slate-100', + fuchsia: 'from-fuchsia-500/40 to-indigo-500/10 text-fuchsia-100', + }; + + return ( +
+

{label}

+
+ {value} +
+ {helper &&

{helper}

} +
+ ); +} diff --git a/ambassadors/interchained-node-operator-portal/frontend/components/charts/UptimeTrend.js b/ambassadors/interchained-node-operator-portal/frontend/components/charts/UptimeTrend.js new file mode 100644 index 000000000..2c314103a --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/frontend/components/charts/UptimeTrend.js @@ -0,0 +1,36 @@ +import { Area, AreaChart, CartesianGrid, ResponsiveContainer, Tooltip, XAxis, YAxis } from 'recharts'; + +export default function UptimeTrend({ data }) { + if (!data?.length) { + return

No uptime data available.

; + } + + const formatted = data.map((point) => ({ + date: new Date(point.timestamp).toLocaleDateString(), + value: Math.round(point.value * 100) / 100, + })); + + return ( +
+ + + + + + + + + + + `${value}%`} /> + [`${value}%`, 'Average Uptime']} + /> + + + +
+ ); +} diff --git a/ambassadors/interchained-node-operator-portal/frontend/components/layout/AdminShell.js b/ambassadors/interchained-node-operator-portal/frontend/components/layout/AdminShell.js new file mode 100644 index 000000000..31f5e911f --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/frontend/components/layout/AdminShell.js @@ -0,0 +1,57 @@ +import Head from 'next/head'; +import { useRouter } from 'next/router'; +import { useEffect } from 'react'; +import { useAuth } from '../../context/AuthContext'; +import SidebarNav from '../navigation/SidebarNav'; + +export default function AdminShell({ title, children }) { + const { user, loading, logout } = useAuth(); + const router = useRouter(); + + useEffect(() => { + if (!loading && !user) { + router.replace('/login'); + } + }, [user, loading, router]); + + if (loading || !user) { + return ( +
+ Loading control plane… +
+ ); + } + + return ( +
+ + {title ? `${title} · Interchained Control Plane` : 'Interchained Control Plane'} + + +
+
+
+

{title}

+

{user.organization_id}

+
+
+
+

{user.full_name || user.email}

+

{user.role.replace('_', ' ')}

+
+ +
+
+
{children}
+
+
+ ); +} diff --git a/ambassadors/interchained-node-operator-portal/frontend/components/navigation/SidebarNav.js b/ambassadors/interchained-node-operator-portal/frontend/components/navigation/SidebarNav.js new file mode 100644 index 000000000..e87011161 --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/frontend/components/navigation/SidebarNav.js @@ -0,0 +1,44 @@ +import Link from 'next/link'; +import { useRouter } from 'next/router'; +import clsx from 'clsx'; + +const NAV_ITEMS = [ + { href: '/dashboard', label: 'Overview', roles: ['super_admin', 'org_admin', 'operator', 'auditor'] }, + { href: '/nodes', label: 'Nodes', roles: ['super_admin', 'org_admin', 'operator'] }, + { href: '/rewards', label: 'Rewards', roles: ['super_admin', 'org_admin', 'operator', 'auditor'] }, + { href: '/admin/users', label: 'Team Access', roles: ['super_admin', 'org_admin'] }, + { href: '/admin/organizations', label: 'Organizations', roles: ['super_admin'] }, + { href: '/admin/billing', label: 'Billing', roles: ['super_admin', 'org_admin'] }, + { href: '/admin/audit', label: 'Audit Log', roles: ['super_admin', 'org_admin'] }, +]; + +export default function SidebarNav({ role }) { + const router = useRouter(); + const items = NAV_ITEMS.filter((item) => item.roles.includes(role)); + + return ( + + ); +} diff --git a/ambassadors/interchained-node-operator-portal/frontend/components/tables/AuditTable.js b/ambassadors/interchained-node-operator-portal/frontend/components/tables/AuditTable.js new file mode 100644 index 000000000..dfe2a903e --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/frontend/components/tables/AuditTable.js @@ -0,0 +1,38 @@ +export default function AuditTable({ events }) { + if (!events?.length) { + return

No audit events captured yet.

; + } + + return ( +
+ + + + + + + + + + + + {events.map((event) => ( + + + + + + + + ))} + +
TimestampActorActionTargetMetadata
{new Date(event.created_at).toLocaleString()}{event.actor_email}{event.action}{event.target || '—'} + {Object.keys(event.metadata || {}).length ? ( +
{JSON.stringify(event.metadata, null, 2)}
+ ) : ( + '—' + )} +
+
+ ); +} diff --git a/ambassadors/interchained-node-operator-portal/frontend/components/tables/OrganizationTable.js b/ambassadors/interchained-node-operator-portal/frontend/components/tables/OrganizationTable.js new file mode 100644 index 000000000..520146c0e --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/frontend/components/tables/OrganizationTable.js @@ -0,0 +1,35 @@ +export default function OrganizationTable({ organizations }) { + if (!organizations?.length) { + return

No organizations found.

; + } + + return ( +
+ + + + + + + + + + + + {organizations.map((org) => ( + + + + + + + + ))} + +
NamePlanBilling EmailMembersCreated
+
{org.name}
+
{org.slug}
+
{org.plan}{org.billing_email}{org.member_count ?? '—'}{new Date(org.created_at).toLocaleDateString()}
+
+ ); +} diff --git a/ambassadors/interchained-node-operator-portal/frontend/context/AuthContext.js b/ambassadors/interchained-node-operator-portal/frontend/context/AuthContext.js new file mode 100644 index 000000000..515c17d58 --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/frontend/context/AuthContext.js @@ -0,0 +1,68 @@ +import { createContext, useContext, useEffect, useMemo, useState } from 'react'; +import api from '../lib/api'; + +const AuthContext = createContext({ + user: null, + token: null, + loading: true, + login: async () => {}, + logout: () => {}, + refresh: async () => {}, +}); + +export function AuthProvider({ children }) { + const [user, setUser] = useState(null); + const [token, setToken] = useState(null); + const [loading, setLoading] = useState(true); + + useEffect(() => { + const stored = window.localStorage.getItem('ic:session'); + if (stored) { + try { + const parsed = JSON.parse(stored); + setToken(parsed.token); + setUser(parsed.user); + } catch (err) { + window.localStorage.removeItem('ic:session'); + } + } + setLoading(false); + }, []); + + const login = async (email, password) => { + setLoading(true); + try { + const session = await api.login({ email, password }); + setToken(session.access_token); + setUser(session.user); + window.localStorage.setItem('ic:session', JSON.stringify({ token: session.access_token, user: session.user })); + return session.user; + } finally { + setLoading(false); + } + }; + + const logout = () => { + setToken(null); + setUser(null); + window.localStorage.removeItem('ic:session'); + }; + + const refresh = async () => { + if (!token) return; + const me = await api.me(token); + setUser(me); + window.localStorage.setItem('ic:session', JSON.stringify({ token, user: me })); + }; + + const value = useMemo( + () => ({ user, token, loading, login, logout, refresh }), + [user, token, loading] + ); + + return {children}; +} + +export function useAuth() { + return useContext(AuthContext); +} diff --git 
// Base URL for the FastAPI backend; override with NEXT_PUBLIC_API_BASE when deploying.
const API_BASE = process.env.NEXT_PUBLIC_API_BASE || 'http://localhost:8000/api/v1';

/**
 * Perform a JSON request against the portal API.
 *
 * @param {string} path - path relative to API_BASE (must start with '/').
 * @param {{ method?: string, body?: object, token?: string }} [options]
 * @returns {Promise<any|null>} parsed JSON body, or null on 204 No Content.
 * @throws {Error} with the server's error detail (or raw body) when !res.ok.
 */
async function request(path, { method = 'GET', body, token } = {}) {
  const headers = { 'Content-Type': 'application/json' };
  if (token) {
    headers.Authorization = `Bearer ${token}`;
  }
  const res = await fetch(`${API_BASE}${path}`, {
    method,
    headers,
    body: body ? JSON.stringify(body) : undefined,
  });
  if (!res.ok) {
    const errorText = await res.text();
    // FIX: FastAPI error bodies are JSON like {"detail": "..."}; surface the
    // human-readable detail instead of the raw JSON blob when possible.
    let message = errorText;
    try {
      const parsed = JSON.parse(errorText);
      if (parsed && typeof parsed.detail === 'string') {
        message = parsed.detail;
      }
    } catch (err) {
      // Not JSON — keep the raw text.
    }
    throw new Error(message || 'Request failed');
  }
  if (res.status === 204) {
    return null;
  }
  return res.json();
}

/**
 * Extract the filename from a Content-Disposition header value.
 * Handles both quoted (filename="x.csv") and bare (filename=x.csv) forms.
 *
 * @param {string|null} disposition
 * @returns {string|null} the filename, or null when absent/unparseable.
 */
function parseFilenameFromDisposition(disposition) {
  if (!disposition) return null;
  const match = disposition.match(/filename="?([^";]+)"?/i);
  return match ? match[1] : null;
}

// Thin, typed-by-convention client for every backend endpoint the frontend uses.
// All calls take the bearer token explicitly so the client stays stateless.
export const api = {
  login: (payload) => request('/users/login', { method: 'POST', body: payload }),
  register: (payload) => request('/users/register', { method: 'POST', body: payload }),
  me: (token) => request('/users/me', { token }),
  listNodes: (token) => request('/nodes', { token }),
  createNode: (token, payload) => request('/nodes', { method: 'POST', body: payload, token }),
  updateNode: (token, nodeId, payload) => request(`/nodes/${nodeId}`, { method: 'PATCH', body: payload, token }),
  dashboardMetrics: (token) => request('/analytics/dashboard', { token }),
  billingSummary: (token, orgId) => request(`/analytics/billing/${orgId}`, { token }),
  rewardSummary: (token) => request('/rewards/today', { token }),
  rewardHistory: (token) => request('/rewards/history', { token }),
  topUpRewardPool: (token, amount) => request('/rewards/pool/top-up', { method: 'POST', body: { amount }, token }),
  // Downloads the reward ledger CSV. Returns { blob, filename } so callers can
  // trigger a browser download; filename falls back to a dated default when the
  // server omits Content-Disposition.
  exportRewardsCsv: async (token, params = {}) => {
    const search = new URLSearchParams();
    if (params.date) search.set('date', params.date);
    if (params.organizationId) search.set('organizationId', params.organizationId);
    const query = search.toString();
    const url = `${API_BASE}/rewards/export${query ? `?${query}` : ''}`;
    const headers = {};
    if (token) {
      headers.Authorization = `Bearer ${token}`;
    }
    const res = await fetch(url, { headers });
    if (!res.ok) {
      const errorText = await res.text();
      throw new Error(errorText || 'Export failed');
    }
    const blob = await res.blob();
    const filename = parseFilenameFromDisposition(res.headers.get('Content-Disposition')) ||
      `reward-export-${params.date || new Date().toISOString().slice(0, 10)}.csv`;
    return { blob, filename };
  },
  organizations: (token) => request('/admin/organizations', { token }),
  createOrganization: (token, payload) => request('/admin/organizations', { method: 'POST', body: payload, token }),
  updateOrganization: (token, orgId, payload) => request(`/admin/organizations/${orgId}`, { method: 'PATCH', body: payload, token }),
  listUsers: (token, orgId) => request(`/admin/organizations/${orgId}/users`, { token }),
  createUser: (token, payload) => request('/admin/users', { method: 'POST', body: payload, token }),
  // FIX: encode the role so a malformed/unexpected value cannot break the query string.
  updateUserRole: (token, email, role) => request(`/admin/users/${encodeURIComponent(email)}/role?role=${encodeURIComponent(role)}`, { method: 'POST', token }),
  deactivateUser: (token, email) => request(`/admin/users/${encodeURIComponent(email)}/deactivate`, { method: 'POST', token }),
  createInvite: (token, payload) => request('/users/invites', { method: 'POST', body: payload, token }),
  listInvites: (token) => request('/users/invites', { token }),
  auditLog: (token, limit = 100) => request(`/audit?limit=${limit}`, { token }),
};

export default api;
unoptimized: true, + }, +}; + +module.exports = nextConfig; diff --git a/ambassadors/interchained-node-operator-portal/frontend/package.json b/ambassadors/interchained-node-operator-portal/frontend/package.json new file mode 100644 index 000000000..3c9898675 --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/frontend/package.json @@ -0,0 +1,25 @@ +{ + "name": "interchained-node-operator-portal", + "version": "0.1.0", + "private": true, + "scripts": { + "dev": "next dev", + "build": "next build", + "start": "next start", + "lint": "next lint" + }, + "dependencies": { + "clsx": "^2.1.0", + "next": "14.1.0", + "react": "18.2.0", + "react-dom": "18.2.0", + "recharts": "^2.7.2", + "tailwindcss": "^3.4.1" + }, + "devDependencies": { + "autoprefixer": "^10.4.16", + "eslint": "^8.56.0", + "eslint-config-next": "14.1.0", + "postcss": "^8.4.31" + } +} diff --git a/ambassadors/interchained-node-operator-portal/frontend/pages/_app.js b/ambassadors/interchained-node-operator-portal/frontend/pages/_app.js new file mode 100644 index 000000000..df5752c38 --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/frontend/pages/_app.js @@ -0,0 +1,10 @@ +import '../styles/globals.css'; +import { AuthProvider } from '../context/AuthContext'; + +export default function App({ Component, pageProps }) { + return ( + + + + ); +} diff --git a/ambassadors/interchained-node-operator-portal/frontend/pages/_document.js b/ambassadors/interchained-node-operator-portal/frontend/pages/_document.js new file mode 100644 index 000000000..286531ca5 --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/frontend/pages/_document.js @@ -0,0 +1,20 @@ +import { Html, Head, Main, NextScript } from 'next/document'; + +export default function Document() { + return ( + + + + + + + +
+ + + + ); +} diff --git a/ambassadors/interchained-node-operator-portal/frontend/pages/admin/audit.js b/ambassadors/interchained-node-operator-portal/frontend/pages/admin/audit.js new file mode 100644 index 000000000..fa09ad0d4 --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/frontend/pages/admin/audit.js @@ -0,0 +1,39 @@ +import { useEffect, useState } from 'react'; +import AdminShell from '../../components/layout/AdminShell'; +import AuditTable from '../../components/tables/AuditTable'; +import { useAuth } from '../../context/AuthContext'; +import api from '../../lib/api'; + +export default function AuditLogPage() { + const { token } = useAuth(); + const [events, setEvents] = useState([]); + const [loading, setLoading] = useState(true); + const [error, setError] = useState(''); + + useEffect(() => { + if (!token) return; + async function load() { + try { + const data = await api.auditLog(token, 200); + setEvents(data || []); + } catch (err) { + setError('Unable to load audit log.'); + } finally { + setLoading(false); + } + } + load(); + }, [token]); + + return ( + + {loading ? ( +

Fetching compliance history…

+ ) : error ? ( +

{error}

+ ) : ( + + )} +
+ ); +} diff --git a/ambassadors/interchained-node-operator-portal/frontend/pages/admin/billing.js b/ambassadors/interchained-node-operator-portal/frontend/pages/admin/billing.js new file mode 100644 index 000000000..c122e1802 --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/frontend/pages/admin/billing.js @@ -0,0 +1,96 @@ +import { useEffect, useState } from 'react'; +import AdminShell from '../../components/layout/AdminShell'; +import { useAuth } from '../../context/AuthContext'; +import api from '../../lib/api'; + +const PLAN_COPY = { + launch: { + headline: 'Launch Tier', + description: 'Optimised for early ecosystem bootstrapping and small node fleets.', + perks: ['5 included nodes', 'Standard analytics', 'Community support'], + }, + growth: { + headline: 'Growth Tier', + description: 'Adds scaling capabilities and advanced monitoring for expanding teams.', + perks: ['20 included nodes', 'Performance reporting', 'Priority support'], + }, + enterprise: { + headline: 'Enterprise Tier', + description: 'Unlimited scale, compliance-ready reporting and dedicated TAM services.', + perks: ['100 included nodes', 'AI uptime forecasts', 'Dedicated success manager'], + }, +}; + +export default function BillingPage() { + const { token, user } = useAuth(); + const [summary, setSummary] = useState(null); + const [loading, setLoading] = useState(true); + const [error, setError] = useState(''); + + useEffect(() => { + if (!token || !user) return; + async function load() { + try { + const data = await api.billingSummary(token, user.organization_id); + setSummary(data); + } catch (err) { + setError('Unable to load billing summary.'); + } finally { + setLoading(false); + } + } + load(); + }, [token, user]); + + return ( + + {loading ? ( +

Calculating utilisation…

+ ) : error ? ( +

{error}

+ ) : summary ? ( +
+
+

Plan Overview

+

Monthly recurring charge calculated on active node usage.

+
+
+ Current Plan + {summary.plan} +
+
+ Monthly Cost + ${summary.monthly_cost.toFixed(2)} +
+
+ Included Nodes + {summary.included_nodes} +
+
+ Current Month Usage + {summary.current_month_usage} +
+
+ Additional Node Price + ${summary.additional_node_price.toFixed(2)} +
+
+
+
+

Plan Benefits

+
+ {(PLAN_COPY[summary.plan] || PLAN_COPY.launch).perks.map((perk) => ( +
+ + {perk} +
+ ))} +
+
+
+ ) : ( +

No billing data available.

+ )} +
+ ); +} diff --git a/ambassadors/interchained-node-operator-portal/frontend/pages/admin/organizations.js b/ambassadors/interchained-node-operator-portal/frontend/pages/admin/organizations.js new file mode 100644 index 000000000..c1b24f05d --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/frontend/pages/admin/organizations.js @@ -0,0 +1,131 @@ +import { useEffect, useState } from 'react'; +import AdminShell from '../../components/layout/AdminShell'; +import OrganizationTable from '../../components/tables/OrganizationTable'; +import { useAuth } from '../../context/AuthContext'; +import api from '../../lib/api'; + +const PLANS = [ + { value: 'launch', label: 'Launch' }, + { value: 'growth', label: 'Growth' }, + { value: 'enterprise', label: 'Enterprise' }, +]; + +export default function OrganizationsPage() { + const { token, user } = useAuth(); + const [organizations, setOrganizations] = useState([]); + const [form, setForm] = useState({ name: '', billing_email: '', plan: 'growth' }); + const [loading, setLoading] = useState(true); + const [error, setError] = useState(''); + const [message, setMessage] = useState(''); + + const isSuperAdmin = user?.role === 'super_admin'; + + useEffect(() => { + if (!token || !isSuperAdmin) { + setLoading(false); + return; + } + async function load() { + try { + const data = await api.organizations(token); + setOrganizations(data || []); + } catch (err) { + setError('Unable to load organizations.'); + } finally { + setLoading(false); + } + } + load(); + }, [token, isSuperAdmin]); + + const handleChange = (event) => { + const { name, value } = event.target; + setForm((prev) => ({ ...prev, [name]: value })); + }; + + const createOrganization = async (event) => { + event.preventDefault(); + setMessage(''); + setError(''); + try { + const created = await api.createOrganization(token, form); + setOrganizations((prev) => [created, ...prev]); + setForm({ name: '', billing_email: '', plan: 'growth' }); + 
setMessage(`Organization ${created.name} created.`); + } catch (err) { + setError(err.message || 'Failed to create organization.'); + } + }; + + if (!isSuperAdmin) { + return ( + +

Super admin permissions required.

+
+ ); + } + + return ( + + {loading ? ( +

Loading tenants…

+ ) : ( +
+
+

Tenant Directory

+ +
+
+

Provision Organization

+
+ + + + +
+ {message &&

{message}

} + {error &&

{error}

} +
+
+ )} +
+ ); +} diff --git a/ambassadors/interchained-node-operator-portal/frontend/pages/admin/users.js b/ambassadors/interchained-node-operator-portal/frontend/pages/admin/users.js new file mode 100644 index 000000000..d83beef19 --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/frontend/pages/admin/users.js @@ -0,0 +1,202 @@ +import { useEffect, useState } from 'react'; +import AdminShell from '../../components/layout/AdminShell'; +import { useAuth } from '../../context/AuthContext'; +import api from '../../lib/api'; + +const ROLE_OPTIONS = [ + { value: 'super_admin', label: 'Super Admin' }, + { value: 'org_admin', label: 'Org Admin' }, + { value: 'operator', label: 'Operator' }, + { value: 'auditor', label: 'Auditor' }, +]; + +export default function TeamAccessPage() { + const { token, user } = useAuth(); + const [members, setMembers] = useState([]); + const [invites, setInvites] = useState([]); + const [role, setRole] = useState('operator'); + const [expiresIn, setExpiresIn] = useState(72); + const [message, setMessage] = useState(''); + const [error, setError] = useState(''); + const [loading, setLoading] = useState(true); + + const isSuperAdmin = user?.role === 'super_admin'; + + useEffect(() => { + if (!token || !user) return; + async function load() { + try { + const [membersRes, inviteRes] = await Promise.all([ + api.listUsers(token, user.organization_id), + api.listInvites(token), + ]); + setMembers(membersRes || []); + setInvites(inviteRes || []); + } catch (err) { + setError('Unable to load team roster.'); + } finally { + setLoading(false); + } + } + load(); + }, [token, user]); + + const refreshInvites = async () => { + if (!token) return; + const inviteRes = await api.listInvites(token); + setInvites(inviteRes || []); + }; + + const refreshMembers = async () => { + if (!token || !user) return; + const membersRes = await api.listUsers(token, user.organization_id); + setMembers(membersRes || []); + }; + + const createInvite = async (event) => { 
+ event.preventDefault(); + setMessage(''); + setError(''); + try { + const invite = await api.createInvite(token, { + organization_id: user.organization_id, + role, + expires_in_hours: expiresIn, + }); + setMessage(`Invite generated: ${invite.code}`); + await refreshInvites(); + } catch (err) { + setError(err.message || 'Failed to create invite.'); + } + }; + + const promoteUser = async (member, newRole) => { + setError(''); + try { + await api.updateUserRole(token, member.email, newRole); + await refreshMembers(); + } catch (err) { + setError('Unable to update user role.'); + } + }; + + const deactivateUser = async (member) => { + setError(''); + try { + await api.deactivateUser(token, member.email); + await refreshMembers(); + } catch (err) { + setError('Unable to deactivate user.'); + } + }; + + return ( + + {loading ? ( +

Loading access roster…

+ ) : ( +
+
+

Members

+
+ {members.map((member) => ( +
+
+
+
{member.full_name || member.email}
+
{member.role}
+
Joined {new Date(member.created_at).toLocaleString()}
+
+
+ + {member.email !== user.email && ( + + )} +
+
+
+ ))} +
+
+ +
+

Invite Collaborators

+

+ Generate time-bound invites to onboard additional operators or auditors. Invites expire automatically and can be + rescinded by regenerating the link. +

+
+ + + +
+
+

Active Invites

+ {invites.length ? ( + invites.map((invite) => ( +
+
+ {invite.code} + {invite.role} +
+
+ Expires {new Date(invite.expires_at).toLocaleString()} +
+
+ )) + ) : ( +

No active invites.

+ )} +
+ {message &&

{message}

} + {error &&

{error}

} +
+
+ )} +
+ ); +} diff --git a/ambassadors/interchained-node-operator-portal/frontend/pages/dashboard.js b/ambassadors/interchained-node-operator-portal/frontend/pages/dashboard.js new file mode 100644 index 000000000..cb231da34 --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/frontend/pages/dashboard.js @@ -0,0 +1,113 @@ +import { useEffect, useMemo, useState } from 'react'; +import AdminShell from '../components/layout/AdminShell'; +import NodeTable from '../components/NodeTable'; +import MetricCard from '../components/cards/MetricCard'; +import UptimeTrend from '../components/charts/UptimeTrend'; +import { useAuth } from '../context/AuthContext'; +import api from '../lib/api'; + +export default function Dashboard() { + const { token } = useAuth(); + const [metrics, setMetrics] = useState(null); + const [nodes, setNodes] = useState([]); + const [rewardSummary, setRewardSummary] = useState(null); + const [loading, setLoading] = useState(true); + const [error, setError] = useState(''); + + useEffect(() => { + if (!token) return; + async function load() { + try { + const [metricsRes, nodesRes, rewardRes] = await Promise.all([ + api.dashboardMetrics(token).catch(() => null), + api.listNodes(token).catch(() => []), + api.rewardSummary(token).catch(() => ({ rewards: {} })), + ]); + setMetrics(metricsRes); + setNodes(nodesRes || []); + setRewardSummary(rewardRes); + } catch (err) { + setError('Unable to load dashboard data.'); + } finally { + setLoading(false); + } + } + load(); + }, [token]); + + const rewardTotal = useMemo(() => { + if (!rewardSummary?.rewards) return 0; + return Object.values(rewardSummary.rewards).reduce((acc, value) => acc + Number(value), 0); + }, [rewardSummary]); + + return ( + + {loading ? ( +

Synthesising live telemetry…

+ ) : error ? ( +

{error}

+ ) : ( +
+
+ + + + +
+ +
+
+
+

Fleet Uptime

+ Last 7 days +
+ +
+
+

Plan Mix

+
+ {metrics?.plan_distribution && Object.keys(metrics.plan_distribution).length ? ( + Object.entries(metrics.plan_distribution).map(([plan, count]) => ( +
+ {plan} + {count} +
+ )) + ) : ( +

No organisations tracked yet.

+ )} +
+
+
+ +
+
+

Node Health Snapshot

+ Showing {nodes.length} nodes +
+ +
+
+ )} +
+ ); +} diff --git a/ambassadors/interchained-node-operator-portal/frontend/pages/index.js b/ambassadors/interchained-node-operator-portal/frontend/pages/index.js new file mode 100644 index 000000000..7a752cbd5 --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/frontend/pages/index.js @@ -0,0 +1,53 @@ +import Link from 'next/link'; + +export default function Landing() { + return ( +
+
+
Interchained Infrastructure
+

+ Enterprise Control Plane for Node Operators +

+

+ Operate mission-critical Interchained infrastructure with SLO-driven monitoring, weighted reward distribution, and + enterprise-grade access controls. +

+
+ + Launch Admin Portal + + + Explore Rewards + +
+
+
+ {[ + { + title: 'Zero-Touch Telemetry', + body: 'FastAPI orchestrators probe fleet health every 60 seconds with Redis-backed SLA dashboards.', + }, + { + title: 'Role-Based Control', + body: 'Multi-tenant RBAC with invite workflows, audit trails, and per-organization analytics.', + }, + { + title: 'Reward Intelligence', + body: 'Weighted payouts, billing summaries, and trend analytics keep operators incentivised.', + }, + ].map((item) => ( +
+

{item.title}

+

{item.body}

+
+ ))} +
+
+ ); +} diff --git a/ambassadors/interchained-node-operator-portal/frontend/pages/login.js b/ambassadors/interchained-node-operator-portal/frontend/pages/login.js new file mode 100644 index 000000000..902c1c9a4 --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/frontend/pages/login.js @@ -0,0 +1,116 @@ +import { useState } from 'react'; +import { useRouter } from 'next/router'; +import GlassContainer from '../components/GlassContainer'; +import { useAuth } from '../context/AuthContext'; +import api from '../lib/api'; + +export default function LoginPage() { + const router = useRouter(); + const { login } = useAuth(); + const [email, setEmail] = useState(''); + const [password, setPassword] = useState(''); + const [fullName, setFullName] = useState(''); + const [inviteCode, setInviteCode] = useState(''); + const [mode, setMode] = useState('login'); + const [loading, setLoading] = useState(false); + const [error, setError] = useState(''); + + const handleSubmit = async (event) => { + event.preventDefault(); + setLoading(true); + setError(''); + + try { + if (mode === 'register') { + await api.register({ email, password, full_name: fullName, invite_code: inviteCode }); + } + await login(email, password); + router.push('/dashboard'); + } catch (err) { + setError(err.message || 'Authentication failed'); + } finally { + setLoading(false); + } + }; + + return ( +
+ +
+
+

{mode === 'login' ? 'Login' : 'Register'}

+

+ {mode === 'login' + ? 'Authenticate with your operator account to access the portal.' + : 'Create an account to start earning node operator rewards.'} +

+
+
+ + {mode === 'register' && ( + + )} + + {mode === 'register' && ( + + )} +
+ {error &&

{error}

} + +

+ {mode === 'login' ? 'Need an account?' : 'Already registered?'}{' '} + +

+
+
+
+ ); +} diff --git a/ambassadors/interchained-node-operator-portal/frontend/pages/nodes.js b/ambassadors/interchained-node-operator-portal/frontend/pages/nodes.js new file mode 100644 index 000000000..22c78e0d7 --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/frontend/pages/nodes.js @@ -0,0 +1,195 @@ +import { useEffect, useState } from 'react'; +import AdminShell from '../components/layout/AdminShell'; +import NodeTable from '../components/NodeTable'; +import { useAuth } from '../context/AuthContext'; +import api from '../lib/api'; + +const DEFAULT_FORM = { + name: '', + p2p_address: '', + rpc_url: '', + wallet_address: '', + tags: '', +}; + +export default function NodesPage() { + const { token, user } = useAuth(); + const [nodes, setNodes] = useState([]); + const [form, setForm] = useState(DEFAULT_FORM); + const [loading, setLoading] = useState(true); + const [message, setMessage] = useState(''); + const [error, setError] = useState(''); + + useEffect(() => { + if (!token) return; + async function load() { + try { + const data = await api.listNodes(token); + setNodes(data); + } catch (err) { + setError('Unable to load node inventory.'); + } finally { + setLoading(false); + } + } + load(); + }, [token]); + + const handleChange = (event) => { + const { name, value } = event.target; + setForm((prev) => ({ ...prev, [name]: value })); + }; + + const resetForm = () => { + setForm(DEFAULT_FORM); + }; + + const handleCreate = async (event) => { + event.preventDefault(); + if (!token) return; + setMessage(''); + setError(''); + try { + const payload = { + name: form.name, + p2p_address: form.p2p_address, + rpc_url: form.rpc_url, + wallet_address: form.wallet_address, + tags: form.tags + .split(',') + .map((tag) => tag.trim()) + .filter(Boolean), + }; + const created = await api.createNode(token, payload); + setNodes((prev) => [created, ...prev]); + resetForm(); + setMessage(`Node ${created.name} registered.`); + } catch (err) { + setError(err.message || 
'Unable to register node.'); + } + }; + + const toggleFlag = async (node) => { + if (!token) return; + try { + const updated = await api.updateNode(token, node.id, { is_flagged: !node.is_flagged }); + setNodes((prev) => prev.map((entry) => (entry.id === updated.id ? updated : entry))); + } catch (err) { + setError('Failed to update node flag.'); + } + }; + + return ( + +
+
+
+

Registered Nodes

+ {nodes.length} total +
+ {loading ? ( +

Querying fleet telemetry…

+ ) : ( + + )} + {!loading && nodes.length > 0 && ( +
+

Flagged nodes

+
+ {nodes.filter((node) => node.is_flagged).length ? ( + nodes + .filter((node) => node.is_flagged) + .map((node) => ( +
+ {node.name} + +
+ )) + ) : ( +

No nodes currently flagged for review.

+ )} +
+
+ )} +
+ +
+

Provision Node

+

+ Register a node with the control plane to start receiving uptime scoring and reward attribution. Tags are optional + and help categorise infrastructure (e.g. edge, validator, apac). +

+
+ + + + + + +
+ {message &&

{message}

} + {error &&

{error}

} +
+
+
+ ); +} diff --git a/ambassadors/interchained-node-operator-portal/frontend/pages/rewards.js b/ambassadors/interchained-node-operator-portal/frontend/pages/rewards.js new file mode 100644 index 000000000..6f4c29100 --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/frontend/pages/rewards.js @@ -0,0 +1,226 @@ +import { useEffect, useMemo, useState } from 'react'; +import AdminShell from '../components/layout/AdminShell'; +import RewardGraph from '../components/RewardGraph'; +import { useAuth } from '../context/AuthContext'; +import api from '../lib/api'; + +export default function RewardsPage() { + const { token, user } = useAuth(); + const [history, setHistory] = useState([]); + const [today, setToday] = useState({ rewards: {} }); + const [nodes, setNodes] = useState([]); + const [loading, setLoading] = useState(true); + const [error, setError] = useState(''); + const [exportError, setExportError] = useState(''); + const [exporting, setExporting] = useState(false); + const [poolAmount, setPoolAmount] = useState(''); + const [poolError, setPoolError] = useState(''); + const [poolSuccess, setPoolSuccess] = useState(''); + const [poolLoading, setPoolLoading] = useState(false); + + useEffect(() => { + if (!token) return; + async function load() { + try { + const [historyRes, todayRes, nodesRes] = await Promise.all([ + api.rewardHistory(token), + api.rewardSummary(token), + api.listNodes(token), + ]); + setHistory(historyRes.history || []); + setToday(todayRes); + setNodes(nodesRes || []); + } catch (err) { + setError('Unable to load reward data.'); + } finally { + setLoading(false); + } + } + load(); + }, [token]); + + const nodeIndex = useMemo(() => { + const map = new Map(); + nodes.forEach((node) => map.set(node.id, node)); + return map; + }, [nodes]); + + const graphData = useMemo(() => { + const totals = history.reduce((acc, item) => { + const key = new Date(item.date).toLocaleDateString(); + acc[key] = (acc[key] || 0) + Number(item.amount); + 
return acc; + }, {}); + return Object.entries(totals).map(([date, amount]) => ({ date, amount })); + }, [history]); + + const totalToday = useMemo(() => { + return Object.values(today.rewards || {}).reduce((sum, amount) => sum + Number(amount), 0); + }, [today]); + + const lifetime = useMemo(() => { + return history.reduce((sum, entry) => sum + Number(entry.amount), 0); + }, [history]); + + const handleExport = async () => { + if (!token) return; + setExportError(''); + setExporting(true); + try { + const { blob, filename } = await api.exportRewardsCsv(token); + const url = window.URL.createObjectURL(blob); + const link = document.createElement('a'); + link.href = url; + link.download = filename; + document.body.appendChild(link); + link.click(); + link.remove(); + window.URL.revokeObjectURL(url); + } catch (err) { + setExportError('Unable to export rewards CSV. Please try again.'); + } finally { + setExporting(false); + } + }; + + const handlePoolTopUp = async (event) => { + event.preventDefault(); + if (!token) return; + const amountValue = Number(poolAmount); + if (!Number.isFinite(amountValue) || amountValue <= 0) { + setPoolError('Enter a positive amount to fund the pool.'); + setPoolSuccess(''); + return; + } + setPoolError(''); + setPoolSuccess(''); + setPoolLoading(true); + try { + const response = await api.topUpRewardPool(token, amountValue); + setToday((prev) => ({ ...prev, pool_balance: response.balance })); + setPoolSuccess(`Pool balance updated to ${response.balance.toFixed(4)} ITC.`); + setPoolAmount(''); + } catch (err) { + setPoolError('Unable to update reward pool. Please try again.'); + } finally { + setPoolLoading(false); + } + }; + + return ( + + {loading ? ( +

Aggregating distribution records…

+ ) : error ? ( +

{error}

+ ) : ( +
+
+
+
+

Reward Velocity

+

Daily distributions

+
+
+
+
Lifetime distributed
+
{lifetime.toFixed(4)} ITC
+
+
+
Current pool balance
+
{Number(today.pool_balance || 0).toFixed(4)} ITC
+
+ +
+
+ {exportError && ( +

+ {exportError} +

+ )} + {user?.role === 'SUPER_ADMIN' && ( +
+
+ + setPoolAmount(e.target.value)} + placeholder="Amount in ITC" + className="mt-1 w-40 rounded-lg border border-slate-800 bg-slate-950/80 px-3 py-2 text-slate-100 focus:border-emerald-400/60 focus:outline-none" + /> +
+ +
+ {poolError &&

{poolError}

} + {poolSuccess &&

{poolSuccess}

} +
+
+ )} + +
+ +
+
+

Today's Allocations

+

Total pool distributed: {totalToday.toFixed(4)} ITC

+
+ {Object.entries(today.rewards || {}).map(([nodeId, amount]) => { + const node = nodeIndex.get(nodeId); + return ( +
+
+
{node?.name || nodeId}
+
{node?.wallet_address || 'Wallet not assigned'}
+
+
{Number(amount).toFixed(4)} ITC
+
+ ); + })} +
+
+ +
+

Recent Payout Events

+
+ {history.slice(-10).reverse().map((entry) => { + const node = nodeIndex.get(entry.node_id); + return ( +
+
+ {node?.name || entry.node_id} + {Number(entry.amount).toFixed(4)} ITC +
+
{new Date(entry.date).toLocaleString()}
+
+ ); + })} +
+
+
+
+ )} +
+ ); +} diff --git a/ambassadors/interchained-node-operator-portal/frontend/postcss.config.js b/ambassadors/interchained-node-operator-portal/frontend/postcss.config.js new file mode 100644 index 000000000..12a703d90 --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/frontend/postcss.config.js @@ -0,0 +1,6 @@ +module.exports = { + plugins: { + tailwindcss: {}, + autoprefixer: {}, + }, +}; diff --git a/ambassadors/interchained-node-operator-portal/frontend/public/preview.html b/ambassadors/interchained-node-operator-portal/frontend/public/preview.html new file mode 100644 index 000000000..4b87ff2a2 --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/frontend/public/preview.html @@ -0,0 +1,255 @@ + + + + + Interchained Control Plane · Dashboard Preview + + + + + +
+
+ + +
+
+
+

Executive Overview

+

Demo Admin · Super Admin · org-enterprise

+
+
+
+

Demo Admin

+

Super Admin

+
+ +
+
+ +
+
+

Active Nodes

+

128

+

Nodes responding to orchestration

+
+
+

Avg Uptime

+

99.42%

+

Weighted across fleet

+
+
+

Flagged Nodes

+

4

+

Requires operator attention

+
+
+

Rewards Today

+

312.4821 ITC

+

Distributed during last cycle

+
+
+ +
+
+
+

Fleet Uptime

+ Last 7 Days +
+
+
+
+

Mon

+
+
+
+

Tue

+
+
+
+

Wed

+
+
+
+

Thu

+
+
+
+

Fri

+
+
+
+

Sat

+
+
+
+

Sun

+
+
+
+
+

Plan Mix

+
+
+ Enterprise + 62 +
+
+ Scale + 37 +
+
+ Growth + 21 +
+
+ Community + 8 +
+
+
+
+ +
+
+

Node Health Snapshot

+ 64 Active Nodes +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NodeLocationUptimeLatencyStatus
ic-node-12New York, US99.82%42 msHealthy
ic-node-44Frankfurt, DE99.64%57 msBoosted
ic-node-08Tokyo, JP96.04%81 msFlagged
ic-node-31São Paulo, BR91.24%114 msDegraded
+
+
+
+
+
+ + diff --git a/ambassadors/interchained-node-operator-portal/frontend/styles/globals.css b/ambassadors/interchained-node-operator-portal/frontend/styles/globals.css new file mode 100644 index 000000000..0b22613a3 --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/frontend/styles/globals.css @@ -0,0 +1,21 @@ +@tailwind base; +@tailwind components; +@tailwind utilities; + +body { + @apply bg-slate-950 text-slate-100 font-body min-h-screen; + background-image: radial-gradient(circle at top left, rgba(53, 211, 255, 0.25), transparent 45%), + radial-gradient(circle at top right, rgba(255, 45, 149, 0.2), transparent 50%); +} + +a { + @apply text-emerald-300 hover:text-emerald-100 transition-colors; +} + +.glass-panel { + @apply bg-glass-dark backdrop-blur-xl border border-slate-700/40 rounded-3xl shadow-neon; +} + +.neon-text { + @apply text-transparent bg-clip-text bg-gradient-to-r from-neon-pink via-neon-blue to-neon-purple font-heading; +} diff --git a/ambassadors/interchained-node-operator-portal/frontend/tailwind.config.js b/ambassadors/interchained-node-operator-portal/frontend/tailwind.config.js new file mode 100644 index 000000000..faf25dad0 --- /dev/null +++ b/ambassadors/interchained-node-operator-portal/frontend/tailwind.config.js @@ -0,0 +1,30 @@ +/** @type {import('tailwindcss').Config} */ +module.exports = { + content: [ + "./pages/**/*.{js,jsx}", + "./components/**/*.{js,jsx}", + ], + theme: { + extend: { + colors: { + neon: { + pink: "#ff2d95", + blue: "#35d3ff", + purple: "#8a63ff", + }, + glass: { + dark: "rgba(10, 12, 29, 0.85)", + light: "rgba(81, 92, 140, 0.35)", + } + }, + fontFamily: { + heading: ["Orbitron", "sans-serif"], + body: ["Inter", "sans-serif"], + }, + boxShadow: { + neon: "0 0 20px rgba(53, 211, 255, 0.45)", + }, + }, + }, + plugins: [], +};