diff --git a/AGENTS.md b/AGENTS.md index 122ab265..03dc26bb 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -32,22 +32,3 @@ After making changes in a specific package, run its check script: - **Imports**: External packages first, then local. Use `.ts` extensions for local imports. - **Bun APIs**: Prefer `Bun.file`, `Bun.serve`, `bun:sqlite`, `Bun.$` over Node equivalents. - **Testing**: Use `bun:test` with `import { test, expect } from "bun:test"`. - -## Better Context MCP - -Use Better Context MCP for documentation/resource questions when you need source-first answers. - -**Required workflow** -1. Call `listResources` first to see available resources. -2. Call `ask` with your question and the exact resource `name` values from step 1. - -**Rules** -- Always call `listResources` before `ask`. -- `ask` requires at least one resource in the `resources` array. -- Use only resource names returned by `listResources`. -- Include only resources relevant to the question. - -**Common errors** -- "Invalid resources" → re-run `listResources` and use exact names. -- "Instance is provisioning / error state" → wait or retry after a minute. -- "Missing or invalid Authorization header" → MCP auth is invalid; fix it in `https://btca.dev/app/settings/mcp/`. \ No newline at end of file diff --git a/DEPLOY_INSTRUCTIONS.md b/DEPLOY_INSTRUCTIONS.md new file mode 100644 index 00000000..50f38e3d --- /dev/null +++ b/DEPLOY_INSTRUCTIONS.md @@ -0,0 +1,378 @@ +# BTCA v2 Deploy Instructions + +This document covers the deployment steps for the Phase 3+ database changes (projects support and remote mode). 
+ +--- + +## ⚠️ CRITICAL: Pre-Deploy Code Changes + +Before deploying, ensure these development overrides are reverted: + +### CLI Remote URL + +In `apps/cli/src/client/remote.ts`, change the `DEFAULT_REMOTE_URL` back to production: + +```typescript +// Change FROM (development): +const DEFAULT_REMOTE_URL = 'http://localhost:5173'; + +// Change TO (production): +const DEFAULT_REMOTE_URL = 'https://btca.dev'; +``` + +--- + +## Pre-Deploy Checklist + +- [ ] Backup production Convex database (optional but recommended) +- [ ] Ensure all schema changes are committed +- [ ] Test migration in dev environment first + +--- + +## Schema Changes Overview + +Phase 3 introduces: + +1. **New `projects` table** - Each instance can have multiple projects +2. **New `mcpQuestions` table** - Records MCP questions/answers per project +3. **Added `projectId`** (optional) to `threads`, `userResources`, `cachedResources` + +These changes are **backward compatible** - the `projectId` fields are optional, and the MCP API accepts an optional `project` parameter that defaults to "default". + +--- + +## Deployment Steps + +### 1. Deploy Schema Changes + +The schema changes will be applied automatically when you deploy to Convex: + +```bash +# For development +cd apps/web +bunx convex dev + +# For production +cd apps/web +bunx convex deploy +``` + +Convex will: + +- Add the new `projects` and `mcpQuestions` tables +- Add the optional `projectId` field to existing tables +- Add new indexes +- Install the migrations component + +**No data loss** - existing records simply won't have a `projectId` set yet. + +### 2. Run the Migration + +After the schema is deployed, run the migration using the Convex migrations component. 
+ +#### Option A: Run All Migrations via CLI (Recommended) + +Run all migrations in sequence with a single command: + +```bash +# For development +cd apps/web +bunx convex run migrations:runAll + +# For production +cd apps/web +bunx convex run migrations:runAll --prod +``` + +This will: + +1. Migrate all `threads` without a projectId → assigns to default project (creates if needed) +2. Migrate all `userResources` without a projectId → assigns to default project +3. Migrate all `cachedResources` without a projectId → assigns to default project + +The migration component will: + +- Skip records already migrated +- Resume from where it left off if interrupted +- Run in batches asynchronously + +#### Option B: Run Individual Migrations + +If you prefer more control, run each migration individually: + +```bash +# 1. Migrate threads +bunx convex run migrations:run '{fn: "migrations:migrateThreadsToProject"}' + +# 2. Migrate user resources +bunx convex run migrations:run '{fn: "migrations:migrateUserResourcesToProject"}' + +# 3. Migrate cached resources +bunx convex run migrations:run '{fn: "migrations:migrateCachedResourcesToProject"}' +``` + +Add `--prod` flag for production. + +#### Option C: Dry Run First + +Test a migration without committing changes: + +```bash +bunx convex run migrations:run '{fn: "migrations:migrateThreadsToProject", dryRun: true}' +``` + +### 3. Monitor Migration Status + +#### Check Overall Status + +```bash +bunx convex run internal.migrations:getMigrationStatus +``` + +This shows: + +- Total instances +- Total projects +- Records without projectId (threads, userResources, cachedResources) +- Whether migration is complete + +#### Watch Migration Progress (Live) + +```bash +bunx convex run --component migrations lib:getStatus --watch +``` + +#### Check Instances Without Default Project + +```bash +bunx convex run internal.migrations:getInstancesWithoutDefaultProject +``` + +### 4. Verify Deployment + +After migration, verify: + +1. 
**MCP commands still work** - Test `listResources` and `ask` without the `project` parameter +2. **Web app loads** - Existing threads should still be visible +3. **New projects can be created** - Test creating a project via the API + +--- + +## Migration Component Operations + +### Stop a Running Migration + +```bash +bunx convex run --component migrations lib:cancel '{name: "migrations:migrateThreadsToProject"}' +``` + +### Restart a Migration from Beginning + +```bash +bunx convex run migrations:run '{fn: "migrations:migrateThreadsToProject", cursor: null}' +``` + +### Create Missing Default Projects Only + +If you just want to create default projects without migrating records: + +```bash +bunx convex run internal.migrations:createMissingDefaultProjects +``` + +--- + +## Rollback Plan + +If issues occur: + +1. **Schema rollback is not needed** - The new fields are optional and don't break existing functionality +2. **If migration caused issues** - The `projectId` fields can be set back to `undefined` if needed (though this shouldn't be necessary) + +--- + +## Post-Deploy Notes + +### Sandbox Changes (Future - Phase 5) + +The sandbox currently ignores the `project` parameter passed in requests. In Phase 5, the sandbox will: + +1. Create project-specific directories: `/root/.local/share/btca/projects/{project-name}/` +2. Store project-specific configs and resources separately +3. Allow different projects to have resources with the same name pointing to different repos + +See `apps/sandbox/README.md` for the planned file system structure. + +### MCP API Changes + +The MCP API now accepts an optional `project` parameter: + +```typescript +// Before (still works) +ask({ apiKey, question, resources }); + +// After (new) +ask({ apiKey, question, resources, project: 'my-project' }); +``` + +If `project` is not provided, it defaults to "default". + +--- + +## Troubleshooting + +### Migration seems stuck + +The migrations component tracks progress and can resume. 
Check status with: + +```bash +bunx convex run --component migrations lib:getStatus --watch +``` + +If needed, cancel and restart: + +```bash +bunx convex run --component migrations lib:cancel '{name: "migrations:migrateThreadsToProject"}' +bunx convex run migrations:run '{fn: "migrations:migrateThreadsToProject", cursor: null}' +``` + +### Records still show no projectId after migration + +The migrations component processes in batches. Check if migration is still running: + +```bash +bunx convex run --component migrations lib:getStatus +``` + +If status shows completed but records remain, the customRange filter may need adjustment. Re-run the migration to catch any stragglers: + +```bash +bunx convex run migrations:runAll +``` + +### "Migration already running" error + +Wait for the current run to complete, or cancel it: + +```bash +bunx convex run --component migrations lib:cancel '{name: "migrations:migrateThreadsToProject"}' +``` + +--- + +## Phase 5: Remote Mode Changes + +Phase 5 introduces CLI remote mode commands and new MCP tools. These are **backward compatible** and don't require database migrations. + +### New Features + +1. **CLI Remote Commands** (`btca remote ...`): + - `btca remote link` - Authenticate with btca cloud via API key + - `btca remote unlink` - Remove authentication + - `btca remote status` - Show instance and project status + - `btca remote wake` - Pre-warm the sandbox + - `btca remote add ` - Add resource to remote config and sync + - `btca remote sync` - Sync local config with cloud + - `btca remote ask` - Ask questions via cloud + - `btca remote grab ` - Output thread transcript + - `btca remote init` - Initialize a remote config file + +2. **New MCP Tools**: + - `addResource` - Add a git resource via MCP + - `sync` - Sync a local config with cloud + +3. 
**CLI API Endpoints** (`/api/cli/...`): + - `GET /api/cli/status` - Instance and project status + - `POST /api/cli/wake` - Wake the sandbox + - `GET /api/cli/threads` - List threads + - `GET /api/cli/threads/:id` - Get thread with messages + - `GET /api/cli/projects` - List projects + - `GET /api/cli/questions` - List MCP questions + +### Configuration Files + +**Remote config** (`btca.remote.config.jsonc`): + +```jsonc +{ + "$schema": "https://btca.dev/btca.remote.schema.json", + "project": "my-project", + "model": "claude-sonnet", + "resources": [ + { + "type": "git", + "name": "svelte", + "url": "https://github.com/sveltejs/svelte.dev", + "branch": "main", + "searchPath": "apps/svelte.dev" + } + ] +} +``` + +**Remote auth** (`~/.config/btca/remote-auth.json`): + +```json +{ + "apiKey": "btca_xxxx...", + "linkedAt": 1234567890 +} +``` + +### Deployment Steps + +1. **Deploy Schema/Code Changes**: + + ```bash + # Deploy to Convex (includes new cli.ts actions) + cd apps/web + bunx convex deploy + ``` + +2. **Deploy Web App**: + + ```bash + # Build and deploy the web app (includes new API routes) + bun run build + # Deploy to your hosting provider + ``` + +3. **Publish CLI**: + ```bash + # Build and publish the CLI with new remote commands + cd apps/cli + bun run build + npm publish + ``` + +### Testing + +1. **Test CLI Remote Link**: + + ```bash + # Create an API key in the web app at /app/settings/mcp + btca remote link --key btca_your_key_here + ``` + +2. **Test Remote Status**: + + ```bash + btca remote status + ``` + +3. **Test MCP addResource**: + + ```bash + # Via MCP client or curl + curl -X POST https://btca.dev/api/mcp \ + -H "Authorization: Bearer btca_your_key" \ + -H "Content-Type: application/json" \ + -d '{"jsonrpc":"2.0","id":1,"method":"tools/call","params":{"name":"addResource","arguments":{"url":"https://github.com/owner/repo","name":"repo","branch":"main"}}}' + ``` + +4. 
**Test CLI API**: + ```bash + curl -X GET https://btca.dev/api/cli/status \ + -H "Authorization: Bearer btca_your_key" + ``` diff --git a/IMPLEMENTATION_PLAN.md b/IMPLEMENTATION_PLAN.md new file mode 100644 index 00000000..c531f0ca --- /dev/null +++ b/IMPLEMENTATION_PLAN.md @@ -0,0 +1,614 @@ +# BTCA v2 Implementation Plan + +This document outlines the complete implementation plan for the BTCA v2 refactor, introducing local and remote modes with a unified agent core. + +--- + +## Overview + +BTCA v2 introduces two distinct operational modes: + +- **Local Mode**: CLI/TUI running on the user's machine, repos cloned locally, using their own API keys +- **Remote Mode**: Cloud service with web app and MCP, repos cached in the cloud, subscription-based + +Both modes share the same agent core, which moves from spawning OpenCode instances to a custom AI SDK loop with opencode's auth system. + +--- + +## Phase 1+2: Auth, Provider & Agent Core + +### Goals + +- Remove OpenCode as the agent runner (keep only for auth) +- Build custom AI SDK agent loop with 4 tools (read, grep, glob, list) +- Support all 40+ providers through opencode's Auth module +- Make the agent implementation identical for local and remote + +### 1. Add OpenCode as Server Dependency + +Update `apps/server/package.json` to include opencode for auth only: + +```json +{ + "dependencies": { + "opencode": "^x.x.x", + "ai": "^5.x", + "@ai-sdk/anthropic": "^x.x.x", + "@ai-sdk/openai": "^x.x.x" + // ... other providers as needed + } +} +``` + +### 2. Provider Abstraction Layer + +Create `apps/server/src/providers/` with: + +- `auth.ts` - Wrapper around opencode's Auth module + - `getCredentials(providerId)` - Get stored credentials + - `isAuthenticated(providerId)` - Check if provider is authed +- `registry.ts` - Provider factory registry + - Map of provider IDs to AI SDK factory functions + - Support for: anthropic, openai, google, azure, groq, mistral, xai, etc. 
+- `model.ts` - Model instantiation + - `getModel(providerId, modelId)` - Create AI SDK model with auth + - Handle both API key and OAuth auth types + +### 3. Ripgrep Binary Management + +Create `apps/server/src/tools/ripgrep.ts`: + +- Check if `rg` exists in PATH +- If not, download pre-built binary from GitHub releases +- Support platforms: darwin-arm64, darwin-x64, linux-arm64, linux-x64, windows-x64 +- Cache binary in `~/.local/share/btca/bin/` +- Make executable (chmod 755) + +### 4. Agent Tools Implementation + +Create `apps/server/src/tools/` with sandboxed implementations: + +**`read.ts`** + +- Parameters: `{ path: string, offset?: number, limit?: number }` +- Read file contents with line numbers +- Truncation: max 2000 lines, 50KB, 2000 chars per line +- Handle images/PDFs as base64 attachments +- Detect binary files (null byte check) +- Path validation: must be within collections directory + +**`grep.ts`** + +- Parameters: `{ pattern: string, path?: string, include?: string }` +- Execute ripgrep subprocess +- Parse output format: `filepath|lineNum|lineText` +- Sort by modification time +- Max 100 results +- Path validation: search path must be within collections directory + +**`glob.ts`** + +- Parameters: `{ pattern: string, path?: string }` +- Use ripgrep's `--files` with glob pattern +- Sort by modification time (most recent first) +- Max 100 results +- Path validation: search path must be within collections directory + +**`list.ts`** + +- Parameters: `{ path: string }` +- List directory contents +- Return file/directory names with types +- Path validation: must be within collections directory + +### 5. Path Sandboxing + +All tools must validate paths stay within the collections directory: + +- Resolve all paths to absolute +- Check that resolved path starts with collections base path +- Reject paths with `..` that escape +- Handle symlinks: resolve and validate target + +### 6. 
Custom Agent Loop + +Create `apps/server/src/agent/loop.ts`: + +- Use AI SDK's `streamText` with tools +- System prompt: expert at searching collections, restricted to read-only tools +- Initial context: `ls` of collections directory +- Tool execution with path sandboxing +- Stream events back to caller +- Handle tool calls, text responses, errors + +### 7. Agent Service Refactor + +Update `apps/server/src/agent/service.ts`: + +- Remove OpenCode SDK dependency for agent execution +- Keep provider validation logic +- Use new custom agent loop +- Maintain same external API (`ask`, `askStream`) + +### 8. Testing + +- Unit tests for each tool +- Integration test: full agent loop with mock provider +- Path sandboxing tests (escape attempts) +- Provider auth tests + +--- + +## Phase 3: Database & Migration Prep + +### Goals + +- Add project support to database schema +- Migrate existing users to "default" project +- Backward-compatible API changes + +### 1. Schema Changes + +Add to `apps/web/src/convex/schema.ts`: + +**New `projects` table:** + +- `instanceId` - Reference to instance +- `name` - Project name (unique per instance) +- `model` - Selected model key +- `createdAt` +- `isDefault` - Boolean, true for auto-created default project + +**Add `projectId` to existing tables:** + +- `threads` - Add optional `projectId` field +- `userResources` - Add optional `projectId` field +- `cachedResources` - Add optional `projectId` field + +**New `mcpQuestions` table:** + +- `projectId` - Reference to project +- `question` - The question asked +- `resources` - Array of resource names used +- `answer` - The response +- `createdAt` + +### 2. Migration Script + +Create Convex migration to: + +1. For each existing instance: + - Create a "default" project with `isDefault: true` + - Update all threads to reference default project + - Update all userResources to reference default project + - Update all cachedResources to reference default project + +2. 
Ensure migration is idempotent (can run multiple times safely) + +### 3. API Changes + +Update Convex actions/queries: + +- `mcp.listResources` - Accept optional `projectId`, default to default project +- `mcp.ask` - Accept optional `projectId`, default to default project +- Add `projects.list` - List projects for instance +- Add `projects.create` - Create new project +- Add `projects.get` - Get project by name +- Add `mcpQuestions.list` - List questions for project + +### 4. Backward Compatibility + +MCP commands without project specified: + +- Look for `btca.remote.config.jsonc` in working directory +- If found, use project name from config +- If not found, use "default" project + +--- + +## Phase 4: Local Mode + +### Goals + +- New config schema with provider/model +- Simplified CLI commands +- TUI for interactive chat +- `btca ask` for agent-to-agent context + +### 1. Config Schema + +**`btca.config.jsonc` (project-level):** + +```jsonc +{ + "$schema": "https://btca.dev/btca.schema.json", + "provider": "anthropic", // Optional, prompted if missing + "model": "claude-sonnet-4-20250514", // Optional, prompted if missing + "dataDirectory": ".btca", // Optional, default: .btca + "resources": [ + { + "type": "git", + "name": "svelte", + "url": "https://github.com/sveltejs/svelte.dev", + "branch": "main", + "searchPaths": ["apps/svelte.dev"], + "specialNotes": "Focus on content directory" + }, + { + "type": "local", + "name": "myDocs", + "path": "./docs", + "specialNotes": "Internal documentation" + } + ] +} +``` + +**Global config (`~/.config/btca/btca.config.jsonc`):** + +- Same schema as project config +- Merged with project config (project wins on conflict) +- Resources from both are combined + +### 2. Config Merging Logic + +When loading config: + +1. Load global config if exists +2. Load project config if exists (from cwd or parent directories) +3. Merge: project provider/model override global +4. 
Merge: combine resources, project version wins on name conflict + +### 3. CLI Commands + +**`btca` (default)** + +- Launch TUI for interactive chat +- Options: `--no-tui` for REPL mode + +**`btca init`** + +- Interactive setup wizard +- Prompts: local vs global storage, initial resources +- Creates `btca.config.jsonc` +- Updates `AGENTS.md` with usage instructions + +**`btca add `** + +- Add a git resource +- Options: + - `-g, --global` - Add to global config + - `-n, --name ` - Resource name (prompted if not provided) + - `-b, --branch ` - Branch (default: main) + - `-s, --search-path ` - Search path within repo (can specify multiple) + - `--notes ` - Special notes for the agent +- Without flags: interactive wizard + +**`btca remove `** + +- Remove a resource by name +- Options: + - `-g, --global` - Remove from global config +- If name not provided: interactive selection + +**`btca connect`** + +- Configure provider/model +- If already authed with opencode: just set in config +- If not authed: guide through opencode auth flow, then set +- Options: + - `-g, --global` - Set in global config + - `-p, --provider ` - Provider ID + - `-m, --model ` - Model ID + +**`btca ask`** + +- One-shot question with streaming response +- Options: + - `-q, --question ` - Required + - `-r, --resource ` - Resources to query (multiple allowed) +- Supports `@mentions` in question text + +**`btca serve`** + +- Start standalone server +- Options: + - `-p, --port ` - Port (default: 8080) + +**`btca clear`** + +- Clear locally cloned resources +- Returns count of cleared resources + +### 4. TUI Implementation + +Update existing TUI to: + +- Use new agent loop (not OpenCode) +- Show streaming responses +- Resource selection +- Model/provider display + +--- + +## Phase 5: Remote Mode + +### Goals + +- Remote config schema +- CLI commands for remote operations +- MCP commands with project support +- Web app project system + +### 1. 
Remote Config Schema + +**`btca.remote.config.jsonc` (project-level):** + +```jsonc +{ + "$schema": "https://btca.dev/btca.remote.schema.json", + "project": "my-project", // Required, unique identifier + "model": "claude-sonnet", // From preset list + "resources": [ + { + "type": "git", + "name": "svelte", + "url": "https://github.com/sveltejs/svelte.dev", + "branch": "main", + "searchPaths": ["apps/svelte.dev"], + "specialNotes": "Focus on content directory" + } + ] +} +``` + +**Available models (preset list):** + +- `claude-sonnet` - Claude Sonnet (default) +- `claude-haiku` - Claude Haiku (faster, cheaper) +- `gpt-4o` - GPT-4o +- `gpt-4o-mini` - GPT-4o Mini + +### 2. Auth Storage + +**`~/.config/btca/remote-auth.json`:** + +```json +{ + "apiKey": "btca_xxxx...", + "linkedAt": 1234567890 +} +``` + +### 3. CLI Commands + +**`btca remote link`** + +- Authenticate with remote instance +- Opens browser for OAuth flow +- Stores API key in `~/.config/btca/remote-auth.json` +- Validates key works + +**`btca remote add `** + +- Add resource to remote config and sync to cloud +- Options: same as `btca add` +- Creates/updates `btca.remote.config.jsonc` +- Syncs resource to cloud project + +**`btca remote sync`** + +- Sync local config with cloud +- Pull: resources in cloud but not local → add to local config +- Push: resources in local but not cloud → add to cloud +- Conflict handling: + - If resource exists in both with different config → ERROR + - User must either `--force` to push local, or update local to match +- Options: + - `--force` - Push local config, overwrite cloud on conflicts + +**`btca remote wake`** + +- Pre-warm the cloud sandbox +- Returns when sandbox is ready +- Useful before starting a session + +**`btca remote status`** + +- Show sandbox status (awake/asleep) +- Show project info +- Show resource count + +**`btca remote grab `** + +- Output full transcript of a thread +- Formatted for easy copy/paste to agents +- Options: + - `--json` - Output as JSON 
+ - `--markdown` - Output as markdown (default) + +**`btca remote ask`** + +- Same as `btca ask` but hits cloud sandbox +- Options: same as `btca ask` +- Uses remote config for project context + +### 4. MCP Commands + +**`listResources`** (existing, updated) + +- Now scoped to project +- Uses project from config or "default" + +**`ask`** (existing, updated) + +- Parameters: `{ question: string, resources: string[], project?: string }` +- Project optional, defaults to "default" or config value +- Records question/answer in `mcpQuestions` table + +**`addResource`** (new) + +- Parameters: `{ url: string, name: string, branch?: string, searchPaths?: string[], notes?: string }` +- Adds resource to cloud project +- Same as `btca remote add` over MCP + +**`sync`** (new) + +- Parameters: `{ config: string }` - Full text of local `btca.remote.config.jsonc` +- Parses config, validates, syncs to cloud +- Returns: `{ ok: boolean, errors?: string[], synced: string[] }` +- Should be called when agent tries to use resource that's not in cloud + +### 5. Web App Updates + +**Project Selector:** + +- Dropdown in header to switch projects +- "default" project always exists +- Create new project button + +**Project View:** + +- Resources tab: list/add/remove resources +- Threads tab: conversation threads for this project +- Questions tab: MCP questions asked for this project +- Settings tab: model selection, project name + +**Questions Tab:** + +- Shows questions asked via MCP +- Question text, resources used, answer, timestamp +- Useful for seeing what agents are asking + +--- + +## Migration Strategy + +### Remote Users (Existing) + +**Automatic migration on first request after update:** + +1. Check if user has a "default" project +2. 
If not: + - Create "default" project with `isDefault: true` + - Move all existing threads → default project + - Move all existing userResources → default project + - Move all existing cachedResources → default project + +**MCP backward compatibility:** + +- Commands without `project` parameter use "default" project +- Existing MCP integrations continue working unchanged +- New integrations can specify project + +### Local Users (Existing) + +**Config compatibility:** + +- Existing `btca.config.jsonc` files work unchanged +- `provider` and `model` fields are optional +- If missing, user is prompted on first `btca ask` or TUI launch +- `btca connect` command to set provider/model + +**Behavior changes:** + +- `btca chat` command removed (was OpenCode TUI) +- `btca config model` and `btca config resources` removed +- Use `btca add`, `btca remove`, `btca connect` instead + +--- + +## Architecture Diagram + +``` +┌─────────────────────────────────────────────────────────────────┐ +│ BTCA v2 Architecture │ +└─────────────────────────────────────────────────────────────────┘ + +LOCAL MODE REMOTE MODE +┌─────────────────┐ ┌─────────────────┐ +│ btca CLI │ │ Web App │ +│ - add/remove │ │ - Projects │ +│ - connect │ │ - Threads │ +│ - ask │ │ - Questions │ +│ - TUI │ │ - Resources │ +└────────┬────────┘ └────────┬────────┘ + │ │ + │ HTTP │ Convex + ▼ ▼ +┌─────────────────┐ ┌─────────────────┐ +│ btca-server │ │ Convex DB │ +│ (local) │ │ - projects │ +│ │ │ - threads │ +│ ┌───────────┐ │ │ - messages │ +│ │ Agent │ │ │ - resources │ +│ │ - read │ │ └────────┬────────┘ +│ │ - grep │ │ │ +│ │ - glob │ │ │ HTTP +│ │ - list │ │ ▼ +│ └───────────┘ │ ┌─────────────────┐ +│ │ │ Daytona │ +│ ┌───────────┐ │ │ Sandbox │ +│ │ Providers │ │ │ │ +│ │ (AI SDK) │ │ │ ┌───────────┐ │ +│ └───────────┘ │ │ │ btca- │ │ +│ │ │ │ server │ │ +│ ┌───────────┐ │ │ │ (same!) 
│ │ +│ │ OpenCode │ │ │ └───────────┘ │ +│ │ Auth │ │ │ │ +│ └───────────┘ │ └─────────────────┘ +└─────────────────┘ │ + │ │ + │ │ MCP + ▼ ▼ +┌─────────────────┐ ┌─────────────────┐ +│ Local Repos │ │ MCP Clients │ +│ (.btca/) │ │ (Cursor, etc) │ +└─────────────────┘ └─────────────────┘ + + +SHARED COMPONENTS: +┌─────────────────────────────────────────────────────────────────┐ +│ apps/server/src/agent/ │ +│ - loop.ts (AI SDK streamText with tools) │ +│ - tools/ (read, grep, glob, list) │ +│ - providers/ (auth wrapper, model factory) │ +│ │ +│ Same code runs locally AND in cloud sandbox │ +└─────────────────────────────────────────────────────────────────┘ +``` + +--- + +## Success Criteria + +### Phase 1+2 Complete When: + +- [x] Can authenticate with any provider via opencode Auth +- [x] Custom agent loop answers questions using 4 tools +- [x] All tools respect path sandboxing +- [x] Ripgrep downloads automatically if not installed +- [x] Existing `btca ask` command works with new agent + +### Phase 3 Complete When: + +- [x] Projects table exists in Convex +- [x] Migration script moves existing data to "default" project +- [x] MCP commands work with and without project parameter +- [x] No breaking changes for existing MCP users + +### Phase 4 Complete When: + +- [x] `btca add`, `btca remove`, `btca connect` commands work +- [x] Config merging (global + project) works correctly +- [x] TUI launches and uses new agent +- [x] Provider/model prompting works when not configured + +### Phase 5 Complete When: + +- [x] `btca remote link` authenticates successfully +- [x] `btca remote sync` handles conflicts correctly +- [x] MCP `addResource` and `sync` commands work +- [x] Web app shows projects with resources/threads/questions +- [x] `btca remote grab` outputs thread transcripts +- [x] Web app shows projects with resources/threads/questions diff --git a/INIT_MCP_CLI_REFACTOR_PLAN.md b/INIT_MCP_CLI_REFACTOR_PLAN.md deleted file mode 100644 index f16dd397..00000000 --- 
a/INIT_MCP_CLI_REFACTOR_PLAN.md +++ /dev/null @@ -1,157 +0,0 @@ -# btca init + btca add Refactor Plan - -Date: 2026-01-25 - -## Goals - -- Update `btca init` to offer two setup paths: - - **MCP (cloud hosted resources)** - - **CLI (local resources)** -- For **MCP**, do **not** create `btca.config.jsonc` for now. -- For **CLI**, create `btca.config.jsonc`, update `AGENTS.md`, and update `.gitignore`. -- Provide next-step instructions tailored to the chosen path: - - **MCP:** point user to the MCP dashboard. - - **CLI:** confirm setup is complete and suggest next actions. -- Add a new top-level `btca add` command that: - - Accepts a **GitHub URL only** (e.g. `btca add https://github.com/owner/repo`). - - Prefills fields inferred from GitHub metadata. - - Uses an interactive terminal wizard so the user can confirm/edit each field. - - Writes to **project config** by default, or **global config** with `-g`. - -## Scope Overview - -- **CLI:** `apps/cli` -- No changes to server APIs required (use existing config/resources plumbing). -- `btca init` becomes a guided setup; `btca add` becomes a guided resource wizard. - ---- - -## 1) `btca init` Refactor - -### UX Flow - -1. User runs `btca init` (no flags). -2. Prompt: - - “Choose setup type:” - - `1) MCP (cloud hosted resources)` - - `2) CLI (local resources)` -3. Based on selection: - -#### MCP Path (cloud hosted resources) - -- **Do not** create `btca.config.jsonc` for now. -- Update `AGENTS.md` to include the MCP instructions section. -- Update `.gitignore` (only if needed; no `.btca` for MCP). -- Print next steps: - - “Get your MCP API key from the dashboard: https://btca.dev/app/settings/mcp/” - - “Configure your MCP client with the Better Context endpoint.” - -#### CLI Path (local resources) - -- Create `btca.config.jsonc` with default model/provider + empty `resources`. -- Update `AGENTS.md` with the CLI instructions section (existing btca section or insert). 
-- Update `.gitignore` if `.btca` local data dir is used (current behavior). -- Print next steps: - - “btca config resources add …” - - “btca ask -r -q …” - - Confirm setup is complete. - -### Implementation Notes - -- **Command file:** `apps/cli/src/commands/init.ts` -- Replace the current `--local` flow with an interactive selection. -- Keep `--force` behavior for config overwrite (CLI path only). -- For MCP path, `--force` should not be necessary (no config file created). -- If a config file already exists: - - CLI path: warn and require `--force` to overwrite. - - MCP path: do not overwrite; still update `AGENTS.md` and show next steps. - -### Output Text - -- Ensure output uses explicit next steps and the MCP dashboard link. -- Add a short “Setup complete” confirmation message for CLI path. - ---- - -## 2) `btca add` Wizard (new command) - -### UX Flow - -Command: - -``` -btca add https://github.com/owner/repo -``` - -Optional global: - -``` -btca add -g https://github.com/owner/repo -``` - -Wizard Steps (prompts should allow edit/confirm): - -1. **URL** (prefilled from arg) -2. **Name** (default = repo name, e.g. `repo`) -3. **Branch** (default = repo default branch; fallback to `main` if unknown) -4. **Search paths** (optional; allow empty) -5. **Notes** (optional) -6. **Confirm summary** → write to config - -### Behavior - -- Accept only GitHub URLs for now; validate and error clearly otherwise. -- Use GitHub URL parsing to infer `owner/repo` and default name. -- Attempt to resolve default branch if possible (if not, default `main`). -- Use existing “add resource” plumbing (current `btca config resources add` flow). -- `-g` writes to `~/.config/btca/btca.config.jsonc` (global) instead of project config. - -### Implementation Notes - -- **New command file:** `apps/cli/src/commands/add.ts` (or inline in `apps/cli/src/index.ts`) -- **Config writing path:** reuse existing config helpers (if present) or create new helper to write config for project/global. 
-- **Prompting:** use Node readline or existing prompt utilities if available. -- **Validation:** GitHub URL parser should handle: - - `https://github.com/owner/repo` - - `https://github.com/owner/repo.git` - ---- - -## 3) AGENTS.md Updates - -### MCP Section (to insert when MCP path chosen) - -``` -## Better Context MCP - -Use Better Context MCP for documentation/resource questions when you need source‑first answers. - -**Required workflow** -1. Call `listResources` first to see available resources. -2. Call `ask` with your question and the exact resource `name` values from step 1. - -**Rules** -- Always call `listResources` before `ask`. -- `ask` requires at least one resource in the `resources` array. -- Use only resource names returned by `listResources`. -- Include only resources relevant to the question. - -**Common errors** -- “Invalid resources” → re-run `listResources` and use exact names. -- “Instance is provisioning / error state” → wait or retry after a minute. -- “Missing or invalid Authorization header” → MCP auth is invalid; fix it in `https://btca.dev/app/settings/mcp/`. -``` - -### CLI Section - -- Use existing CLI section content (current `AGENTS.md` btca section) or template from `apps/web/src/lib/assets/docs/example-AGENTS-section.md`. -- Ensure it documents `btca ask` usage and where the config lives. - ---- - -## 4) Questions / Follow-ups - -- Should the CLI setup path still support `--local` to use `.btca` data directory, or should it default to global data always? -- For `btca add`, do we want to support multiple search paths via repeated prompt entries or comma-separated input? -- Should `btca add` automatically call `btca init` if no config exists in project path? 
- diff --git a/PROJECT_REFERENCE.md b/PROJECT_REFERENCE.md new file mode 100644 index 00000000..6b98f7a1 --- /dev/null +++ b/PROJECT_REFERENCE.md @@ -0,0 +1,917 @@ +# BTCA v2 Project Reference + +Complete reference for BTCA v2 architecture, CLI commands, MCP tools, and data model. + +--- + +## Table of Contents + +1. [Architecture Overview](#architecture-overview) +2. [CLI Commands](#cli-commands) +3. [MCP Tools](#mcp-tools) +4. [Configuration Files](#configuration-files) +5. [Data Model](#data-model) +6. [Server API](#server-api) + +--- + +## Architecture Overview + +BTCA operates in two modes: + +### Local Mode + +- **Interface**: CLI (TUI or REPL) +- **Agent Location**: User's machine +- **Repo Storage**: Local filesystem (`.btca/` or `~/.local/share/btca/`) +- **Auth**: OpenCode auth system (user's API keys) +- **Cost**: Token cost through user's provider subscription + +### Remote Mode + +- **Interface**: Web app + MCP +- **Agent Location**: Daytona cloud sandbox +- **Repo Storage**: Cloud (cached per project) +- **Auth**: BTCA API key (subscription-based) +- **Cost**: $8/mo subscription + +### Shared Components + +Both modes use identical agent code in `apps/server`: + +``` +apps/server/ +├── src/ +│ ├── agent/ +│ │ ├── loop.ts # AI SDK streamText loop +│ │ ├── service.ts # Agent service interface +│ │ └── types.ts +│ ├── tools/ +│ │ ├── read.ts # File reading +│ │ ├── grep.ts # Regex search (ripgrep) +│ │ ├── glob.ts # File pattern matching +│ │ ├── list.ts # Directory listing +│ │ └── ripgrep.ts # Binary management +│ ├── providers/ +│ │ ├── auth.ts # OpenCode auth wrapper +│ │ ├── registry.ts # Provider factories +│ │ └── model.ts # Model instantiation +│ └── ... +``` + +--- + +## CLI Commands + +### Global Options + +All commands support: + +- `--server ` - Use existing btca server URL +- `--port ` - Port for auto-started server + +--- + +### Core Commands + +#### `btca` + +Launch interactive TUI (default command). 
+ +```bash +btca [options] +``` + +**Options:** +| Flag | Description | +|------|-------------| +| `--no-tui` | Use REPL mode instead of TUI | + +--- + +#### `btca init` + +Initialize project configuration. + +```bash +btca init [options] +``` + +**Options:** +| Flag | Description | +|------|-------------| +| `-f, --force` | Overwrite existing configuration | + +**Behavior:** + +- Interactive wizard for setup +- Creates `btca.config.jsonc` in current directory +- Updates `AGENTS.md` with usage instructions + +--- + +#### `btca add ` + +Add a git repository as a resource. + +```bash +btca add [options] +``` + +**Arguments:** +| Argument | Required | Description | +|----------|----------|-------------| +| `url` | Yes | GitHub repository URL | + +**Options:** +| Flag | Description | +|------|-------------| +| `-g, --global` | Add to global config instead of project | +| `-n, --name ` | Resource name (prompted if omitted) | +| `-b, --branch ` | Branch to use (default: main) | +| `-s, --search-path ` | Subdirectory to search (repeatable) | +| `--notes ` | Special notes for the agent | + +**Examples:** + +```bash +# Interactive wizard +btca add https://github.com/sveltejs/svelte.dev + +# Fully specified (agent-friendly) +btca add https://github.com/sveltejs/svelte.dev \ + -n svelte \ + -b main \ + -s apps/svelte.dev/src/content \ + --notes "Focus on documentation content" + +# Add to global config +btca add -g https://github.com/tj/commander.js +``` + +--- + +#### `btca remove ` + +Remove a resource from configuration. 
+ +```bash +btca remove [name] [options] +``` + +**Arguments:** +| Argument | Required | Description | +|----------|----------|-------------| +| `name` | No | Resource name (interactive selection if omitted) | + +**Options:** +| Flag | Description | +|------|-------------| +| `-g, --global` | Remove from global config | + +**Examples:** + +```bash +# Interactive selection +btca remove + +# Direct removal +btca remove svelte + +# Remove from global +btca remove -g commander +``` + +--- + +#### `btca connect` + +Configure AI provider and model. + +```bash +btca connect [options] +``` + +**Options:** +| Flag | Description | +|------|-------------| +| `-g, --global` | Set in global config | +| `-p, --provider ` | Provider ID (e.g., anthropic, openai) | +| `-m, --model ` | Model ID (e.g., claude-sonnet-4-20250514) | + +**Behavior:** + +- If provider not authenticated with OpenCode: guides through auth flow +- If already authenticated: sets provider/model in config + +**Examples:** + +```bash +# Interactive setup +btca connect + +# Direct configuration +btca connect -p anthropic -m claude-sonnet-4-20250514 + +# Set globally +btca connect -g -p openai -m gpt-4o +``` + +--- + +#### `btca ask` + +Ask a one-shot question (non-interactive). + +```bash +btca ask [options] +``` + +**Options:** +| Flag | Required | Description | +|------|----------|-------------| +| `-q, --question ` | Yes | The question to ask | +| `-r, --resource ` | No | Resources to query (repeatable) | + +**Behavior:** + +- Streams response to stdout +- Supports `@mentions` in question text (e.g., `@svelte`) +- Uses all resources if none specified + +**Examples:** + +```bash +# Ask about specific resource +btca ask -q "How do I create a store?" -r svelte + +# Ask about multiple resources +btca ask -q "Compare routing in @svelte vs @svelteKit" -r svelte -r svelteKit + +# Use @mentions +btca ask -q "How does @hono handle middleware?" +``` + +--- + +#### `btca serve` + +Start standalone HTTP server. 
+ +```bash +btca serve [options] +``` + +**Options:** +| Flag | Description | +|------|-------------| +| `-p, --port ` | Port to listen on (default: 8080) | + +--- + +#### `btca clear` + +Clear all locally cloned resources. + +```bash +btca clear +``` + +**Behavior:** + +- Removes all cloned repos from data directory +- Returns count of cleared resources + +--- + +### Remote Commands + +All remote commands require prior authentication via `btca remote link`. + +#### `btca remote link` + +Authenticate with BTCA cloud service. + +```bash +btca remote link +``` + +**Behavior:** + +- Opens browser for OAuth flow +- Stores API key in `~/.config/btca/remote-auth.json` +- Validates key works + +--- + +#### `btca remote add ` + +Add resource to remote project and sync to cloud. + +```bash +btca remote add [options] +``` + +**Arguments & Options:** Same as `btca add` + +**Behavior:** + +- Creates/updates `btca.remote.config.jsonc` +- Syncs resource to cloud project + +--- + +#### `btca remote sync` + +Synchronize local config with cloud. + +```bash +btca remote sync [options] +``` + +**Options:** +| Flag | Description | +|------|-------------| +| `--force` | Overwrite cloud on conflicts | + +**Behavior:** + +- Pull: resources in cloud but not local → add to local config +- Push: resources in local but not cloud → add to cloud +- Conflict: resource exists in both with different config → ERROR + - Use `--force` to push local version + - Or update local config to match cloud + +--- + +#### `btca remote wake` + +Pre-warm the cloud sandbox. + +```bash +btca remote wake +``` + +**Behavior:** + +- Starts sandbox if stopped +- Returns when sandbox is ready (~4 seconds) + +--- + +#### `btca remote status` + +Show cloud instance status. + +```bash +btca remote status +``` + +**Output:** + +- Sandbox state (running/stopped) +- Project name +- Resource count +- Last activity + +--- + +#### `btca remote grab ` + +Output full transcript of a cloud thread. 
+ +```bash +btca remote grab [options] +``` + +**Arguments:** +| Argument | Required | Description | +|----------|----------|-------------| +| `threadId` | Yes | Thread ID from web app | + +**Options:** +| Flag | Description | +|------|-------------| +| `--json` | Output as JSON | +| `--markdown` | Output as markdown (default) | + +--- + +#### `btca remote ask` + +Ask a question via cloud sandbox. + +```bash +btca remote ask [options] +``` + +**Options:** Same as `btca ask` + +**Behavior:** + +- Uses remote config for project context +- Hits cloud sandbox instead of local agent + +--- + +## MCP Tools + +MCP endpoint: `https://btca.dev/api/mcp` + +Authentication: Bearer token (API key from web app) + +--- + +### `listResources` + +List available resources for the authenticated user. + +**Parameters:** None + +**Returns:** + +```json +[ + { + "name": "svelte", + "displayName": "Svelte", + "type": "git", + "url": "https://github.com/sveltejs/svelte.dev", + "branch": "main", + "searchPath": "apps/svelte.dev", + "specialNotes": "Focus on content directory" + } +] +``` + +--- + +### `ask` + +Ask a question about specific resources. + +**Parameters:** +| Parameter | Type | Required | Description | +|-----------|------|----------|-------------| +| `question` | string | Yes | The question to ask | +| `resources` | string[] | Yes | Resource names (from listResources) | +| `project` | string | No | Project name (default: "default" or from config) | + +**Returns:** + +```json +{ + "text": "The answer to your question..." +} +``` + +--- + +### `addResource` + +Add a resource to the cloud project. 
+ +**Parameters:** +| Parameter | Type | Required | Description | +|-----------|------|----------|-------------| +| `url` | string | Yes | GitHub repository URL | +| `name` | string | Yes | Resource name | +| `branch` | string | No | Branch (default: main) | +| `searchPaths` | string[] | No | Subdirectories to search | +| `notes` | string | No | Special notes for agent | + +**Returns:** + +```json +{ + "ok": true, + "resource": { + "name": "svelte", + "url": "https://github.com/sveltejs/svelte.dev" + } +} +``` + +--- + +### `sync` + +Sync local config to cloud. + +**Parameters:** +| Parameter | Type | Required | Description | +|-----------|------|----------|-------------| +| `config` | string | Yes | Full text of `btca.remote.config.jsonc` | + +**Returns:** + +```json +{ + "ok": true, + "synced": ["svelte", "hono"], + "errors": [] +} +``` + +**Error Response:** + +```json +{ + "ok": false, + "errors": ["Resource 'svelte' has conflicting configuration"], + "synced": [] +} +``` + +--- + +## Configuration Files + +### `btca.config.jsonc` (Local Mode) + +Location: Project root or `~/.config/btca/btca.config.jsonc` (global) + +```jsonc +{ + "$schema": "https://btca.dev/btca.schema.json", + + // Provider configuration (optional, prompted if missing) + "provider": "anthropic", + "model": "claude-sonnet-4-20250514", + + // Where to store cloned repos (optional) + // "local" = .btca/ in project, "global" = ~/.local/share/btca/ + "dataDirectory": ".btca", + + // Resources available in this project + "resources": [ + { + "type": "git", + "name": "svelte", + "url": "https://github.com/sveltejs/svelte.dev", + "branch": "main", + "searchPaths": ["apps/svelte.dev"], + "specialNotes": "Focus on content directory for documentation" + }, + { + "type": "local", + "name": "internal-docs", + "path": "./docs", + "specialNotes": "Internal API documentation" + } + ] +} +``` + +**Config Merging:** + +- Global config loaded first +- Project config merged on top +- Project values override 
global on conflict
+- Resources combined (project version wins on name conflict)
+
+---
+
+### `btca.remote.config.jsonc` (Remote Mode)
+
+Location: Project root
+
+```jsonc
+{
+  "$schema": "https://btca.dev/btca.remote.schema.json",
+
+  // Project name (required, unique identifier)
+  "project": "my-webapp",
+
+  // Model selection (from preset list)
+  "model": "claude-sonnet",
+
+  // Resources for this project
+  "resources": [
+    {
+      "type": "git",
+      "name": "svelte",
+      "url": "https://github.com/sveltejs/svelte.dev",
+      "branch": "main",
+      "searchPaths": ["apps/svelte.dev"],
+      "specialNotes": "Focus on content directory"
+    }
+  ]
+}
+```
+
+**Available Models:**
+| Key | Model |
+|-----|-------|
+| `claude-sonnet` | Claude Sonnet |
+| `claude-haiku` | Claude Haiku |
+| `gpt-4o` | GPT-4o |
+| `gpt-4o-mini` | GPT-4o mini |
+
+---
+
+### `~/.config/btca/remote-auth.json`
+
+Stores remote authentication.
+
+```json
+{
+  "apiKey": "btca_xxxxxxxxxxxx",
+  "linkedAt": 1706000000000
+}
+```
+
+---
+
+## Data Model
+
+### Convex Schema (Remote/Cloud)
+
+#### `projects`
+
+| Field        | Type            | Description                           |
+| ------------ | --------------- | ------------------------------------- |
+| `instanceId` | Id<"instances"> | Reference to user instance            |
+| `name`       | string          | Project name (unique per instance)    |
+| `model`      | string          | Selected model key                    |
+| `isDefault`  | boolean         | True for auto-created default project |
+| `createdAt`  | number          | Timestamp                             |
+
+**Indexes:** `by_instance`, `by_name`
+
+---
+
+#### `instances`
+
+| Field                | Type    | Description                      |
+| -------------------- | ------- | -------------------------------- |
+| `clerkId`            | string  | Clerk user ID                    |
+| `sandboxId`          | string? | Daytona sandbox ID               |
+| `state`              | enum    | Instance state                   |
+| `serverUrl`          | string? | Running server URL               |
+| `subscriptionPlan`   | enum?   | pro, free, none                  |
+| `subscriptionStatus` | enum?   | active, trialing, canceled, none |
+| `storageUsedBytes`   | number?
| Storage usage | +| `createdAt` | number | Timestamp | + +**States:** `unprovisioned`, `provisioning`, `stopped`, `starting`, `running`, `stopping`, `updating`, `error` + +--- + +#### `threads` + +| Field | Type | Description | +| ---------------- | --------------- | ----------------------- | +| `instanceId` | Id<"instances"> | Reference to instance | +| `projectId` | Id<"projects">? | Reference to project | +| `title` | string? | Thread title | +| `createdAt` | number | Timestamp | +| `lastActivityAt` | number | Last activity timestamp | + +--- + +#### `messages` + +| Field | Type | Description | +| ----------- | ---------------- | ------------------------ | +| `threadId` | Id<"threads"> | Reference to thread | +| `role` | enum | user, assistant, system | +| `content` | string \| chunks | Message content | +| `resources` | string[]? | Resources used | +| `canceled` | boolean? | If response was canceled | +| `createdAt` | number | Timestamp | + +--- + +#### `userResources` + +| Field | Type | Description | +| -------------- | --------------- | --------------------- | +| `instanceId` | Id<"instances"> | Reference to instance | +| `projectId` | Id<"projects">? | Reference to project | +| `name` | string | Resource name | +| `type` | literal | "git" | +| `url` | string | Repository URL | +| `branch` | string | Branch name | +| `searchPath` | string? | Subdirectory path | +| `specialNotes` | string? | Notes for agent | +| `createdAt` | number | Timestamp | + +--- + +#### `cachedResources` + +| Field | Type | Description | +| ------------ | --------------- | --------------------- | +| `instanceId` | Id<"instances"> | Reference to instance | +| `projectId` | Id<"projects">? | Reference to project | +| `name` | string | Resource name | +| `url` | string | Repository URL | +| `branch` | string | Branch name | +| `sizeBytes` | number? 
| Cache size | +| `cachedAt` | number | When cached | +| `lastUsedAt` | number | Last access time | + +--- + +#### `mcpQuestions` + +| Field | Type | Description | +| ----------- | -------------- | -------------------- | +| `projectId` | Id<"projects"> | Reference to project | +| `question` | string | Question asked | +| `resources` | string[] | Resources queried | +| `answer` | string | Response text | +| `createdAt` | number | Timestamp | + +--- + +#### `apiKeys` + +| Field | Type | Description | +| ------------ | --------------- | ---------------------- | +| `instanceId` | Id<"instances"> | Reference to instance | +| `name` | string | Key name | +| `keyHash` | string | Hashed key | +| `keyPrefix` | string | Key prefix for display | +| `createdAt` | number | Timestamp | +| `lastUsedAt` | number? | Last use time | +| `revokedAt` | number? | Revocation time | +| `usageCount` | number? | Usage counter | + +--- + +#### `globalResources` + +| Field | Type | Description | +| -------------- | ------- | --------------------- | +| `name` | string | Resource name | +| `displayName` | string | Display name | +| `type` | literal | "git" | +| `url` | string | Repository URL | +| `branch` | string | Branch name | +| `searchPath` | string? | Subdirectory path | +| `specialNotes` | string? | Notes for agent | +| `isActive` | boolean | If available to users | + +--- + +## Server API + +Base URL: `http://localhost:` (local) or sandbox URL (remote) + +### Endpoints + +#### `GET /` + +Health check. + +**Response:** `{ "status": "ok" }` + +--- + +#### `GET /config` + +Get current configuration. + +**Response:** + +```json +{ + "provider": "anthropic", + "model": "claude-sonnet-4-20250514", + "dataDirectory": ".btca" +} +``` + +--- + +#### `GET /resources` + +List all configured resources. 
+ +**Response:** + +```json +[ + { + "name": "svelte", + "type": "git", + "url": "https://github.com/sveltejs/svelte.dev", + "branch": "main" + } +] +``` + +--- + +#### `GET /providers` + +List available AI providers. + +**Response:** + +```json +{ + "all": [ + { "id": "anthropic", "models": {...} }, + { "id": "openai", "models": {...} } + ], + "connected": ["anthropic"] +} +``` + +--- + +#### `POST /question` + +Ask a question (non-streaming). + +**Request:** + +```json +{ + "question": "How do I create a store?", + "resources": ["svelte"] +} +``` + +**Response:** + +```json +{ + "answer": "To create a store in Svelte...", + "model": { + "provider": "anthropic", + "model": "claude-sonnet-4-20250514" + } +} +``` + +--- + +#### `POST /question/stream` + +Ask a question (SSE streaming). + +**Request:** Same as `/question` + +**Response:** Server-Sent Events stream + +--- + +#### `PUT /config/model` + +Update model configuration. + +**Request:** + +```json +{ + "provider": "anthropic", + "model": "claude-sonnet-4-20250514" +} +``` + +--- + +#### `POST /config/resources` + +Add a resource. + +**Request:** + +```json +{ + "type": "git", + "name": "hono", + "url": "https://github.com/honojs/website", + "branch": "main", + "searchPath": "docs" +} +``` + +--- + +#### `DELETE /config/resources` + +Remove a resource. + +**Request:** + +```json +{ + "name": "hono" +} +``` + +--- + +#### `POST /clear` + +Clear all cloned resources. + +**Response:** + +```json +{ + "cleared": 5 +} +``` diff --git a/apps/cli/src/client/remote.ts b/apps/cli/src/client/remote.ts new file mode 100644 index 00000000..f0c29522 --- /dev/null +++ b/apps/cli/src/client/remote.ts @@ -0,0 +1,592 @@ +/** + * Remote API client for btca cloud service. + * Communicates with the web app's API endpoints via the MCP protocol. + */ + +// TODO: Change back to 'https://btca.dev' before deploying! 
+const DEFAULT_REMOTE_URL = 'https://btca.dev'; + +// Local type definitions to avoid circular dependencies +export interface GitResource { + type: 'git'; + name: string; + url: string; + branch: string; + searchPath?: string; + searchPaths?: string[]; + specialNotes?: string; +} + +export interface RemoteConfig { + $schema?: string; + project: string; + model?: 'claude-sonnet' | 'claude-haiku' | 'gpt-4o' | 'gpt-4o-mini'; + resources: GitResource[]; +} + +export interface RemoteClientOptions { + apiKey: string; + baseUrl?: string; +} + +export interface RemoteResource { + name: string; + displayName: string; + type: string; + url: string; + branch: string; + searchPath?: string; + specialNotes?: string; + isGlobal: boolean; +} + +export interface RemoteProject { + _id: string; + name: string; + model?: string; + isDefault: boolean; + createdAt: number; +} + +export interface RemoteInstance { + _id: string; + state: + | 'unprovisioned' + | 'provisioning' + | 'stopped' + | 'starting' + | 'running' + | 'stopping' + | 'updating' + | 'error'; + serverUrl?: string; + btcaVersion?: string; + subscriptionPlan?: 'pro' | 'free' | 'none'; +} + +export interface RemoteThread { + _id: string; + title?: string; + createdAt: number; + lastActivityAt: number; +} + +export interface RemoteMessage { + _id: string; + threadId: string; + role: 'user' | 'assistant' | 'system'; + content: string; + resources?: string[]; + createdAt: number; +} + +export interface McpQuestion { + _id: string; + projectId: string; + question: string; + resources: string[]; + answer: string; + createdAt: number; +} + +export interface SyncResult { + ok: boolean; + errors?: string[]; + synced: string[]; + conflicts?: Array<{ + name: string; + local: GitResource; + remote: RemoteResource; + }>; +} + +export class RemoteApiError extends Error { + readonly statusCode?: number; + readonly hint?: string; + + constructor(message: string, options?: { statusCode?: number; hint?: string }) { + super(message); + 
this.name = 'RemoteApiError'; + this.statusCode = options?.statusCode; + this.hint = options?.hint; + } +} + +/** + * Remote API client + */ +export class RemoteClient { + private readonly apiKey: string; + private readonly baseUrl: string; + + constructor(options: RemoteClientOptions) { + this.apiKey = options.apiKey; + this.baseUrl = options.baseUrl ?? DEFAULT_REMOTE_URL; + } + + private async request(path: string, options: RequestInit = {}): Promise { + const url = `${this.baseUrl}${path}`; + const headers = new Headers(options.headers); + headers.set('Authorization', `Bearer ${this.apiKey}`); + headers.set('Content-Type', 'application/json'); + + const response = await fetch(url, { + ...options, + headers + }); + + if (!response.ok) { + let errorMessage = `Request failed: ${response.status}`; + let hint: string | undefined; + + try { + const body = (await response.json()) as { error?: string; hint?: string }; + if (body.error) errorMessage = body.error; + if (body.hint) hint = body.hint; + } catch { + // Ignore JSON parse errors + } + + throw new RemoteApiError(errorMessage, { + statusCode: response.status, + hint + }); + } + + const contentType = response.headers.get('content-type') ?? 
''; + + if (contentType.includes('text/event-stream')) { + const text = await response.text(); + const dataLine = text.split('\n').find((line) => line.startsWith('data: ')); + if (!dataLine) { + throw new RemoteApiError('No data in SSE response'); + } + return JSON.parse(dataLine.slice(6)) as T; + } + + return response.json() as Promise; + } + + /** + * Validate the API key and get basic info + */ + async validate(): Promise<{ valid: boolean; error?: string }> { + try { + // Use the MCP listResources endpoint to validate + const result = await this.listResources(); + return { valid: result.ok }; + } catch (error) { + if (error instanceof RemoteApiError && error.statusCode === 401) { + return { valid: false, error: 'Invalid or expired API key' }; + } + throw error; + } + } + + /** + * List available resources via MCP + */ + async listResources(project?: string): Promise< + | { + ok: true; + resources: RemoteResource[]; + } + | { + ok: false; + error: string; + } + > { + // MCP uses JSON-RPC, we need to call the tools/call endpoint + const mcpRequest = { + jsonrpc: '2.0', + id: Date.now(), + method: 'tools/call', + params: { + name: 'listResources', + arguments: project ? { project } : {} + } + }; + + const response = await this.request<{ + result?: { + content: Array<{ type: string; text: string }>; + isError?: boolean; + }; + error?: { message: string }; + }>('/api/mcp', { + method: 'POST', + body: JSON.stringify(mcpRequest) + }); + + if (response.error) { + return { ok: false, error: response.error.message }; + } + + if (response.result?.isError) { + const errorText = response.result.content[0]?.text ?? 'Unknown error'; + try { + const parsed = JSON.parse(errorText) as { error?: string }; + return { ok: false, error: parsed.error ?? errorText }; + } catch { + return { ok: false, error: errorText }; + } + } + + const text = response.result?.content[0]?.text ?? 
'[]'; + try { + const resources = JSON.parse(text) as RemoteResource[]; + return { ok: true, resources }; + } catch { + return { ok: false, error: 'Failed to parse resources' }; + } + } + + /** + * Ask a question via MCP + */ + async ask( + question: string, + resources: string[], + project?: string + ): Promise< + | { + ok: true; + text: string; + } + | { + ok: false; + error: string; + } + > { + const mcpRequest = { + jsonrpc: '2.0', + id: Date.now(), + method: 'tools/call', + params: { + name: 'ask', + arguments: { + question, + resources, + ...(project && { project }) + } + } + }; + + const response = await this.request<{ + result?: { + content: Array<{ type: string; text: string }>; + isError?: boolean; + }; + error?: { message: string }; + }>('/api/mcp', { + method: 'POST', + body: JSON.stringify(mcpRequest) + }); + + if (response.error) { + return { ok: false, error: response.error.message }; + } + + if (response.result?.isError) { + const errorText = response.result.content[0]?.text ?? 'Unknown error'; + try { + const parsed = JSON.parse(errorText) as { error?: string }; + return { ok: false, error: parsed.error ?? errorText }; + } catch { + return { ok: false, error: errorText }; + } + } + + return { + ok: true, + text: response.result?.content[0]?.text ?? 
'' + }; + } + + /** + * Add a resource via MCP + */ + async addResource( + resource: GitResource, + project?: string + ): Promise< + | { + ok: true; + resource: RemoteResource; + } + | { + ok: false; + error: string; + } + > { + const mcpRequest = { + jsonrpc: '2.0', + id: Date.now(), + method: 'tools/call', + params: { + name: 'addResource', + arguments: { + url: resource.url, + name: resource.name, + branch: resource.branch, + ...(resource.searchPath && { searchPath: resource.searchPath }), + ...(resource.searchPaths && { searchPaths: resource.searchPaths }), + ...(resource.specialNotes && { notes: resource.specialNotes }), + ...(project && { project }) + } + } + }; + + const response = await this.request<{ + result?: { + content: Array<{ type: string; text: string }>; + isError?: boolean; + }; + error?: { message: string }; + }>('/api/mcp', { + method: 'POST', + body: JSON.stringify(mcpRequest) + }); + + if (response.error) { + return { ok: false, error: response.error.message }; + } + + if (response.result?.isError) { + const errorText = response.result.content[0]?.text ?? 'Unknown error'; + try { + const parsed = JSON.parse(errorText) as { error?: string }; + return { ok: false, error: parsed.error ?? errorText }; + } catch { + return { ok: false, error: errorText }; + } + } + + const text = response.result?.content[0]?.text ?? '{}'; + try { + const result = JSON.parse(text) as RemoteResource; + return { ok: true, resource: result }; + } catch { + return { ok: true, resource: { name: resource.name } as RemoteResource }; + } + } + + /** + * Sync config with cloud + */ + async sync(config: RemoteConfig, force?: boolean): Promise { + const mcpRequest = { + jsonrpc: '2.0', + id: Date.now(), + method: 'tools/call', + params: { + name: 'sync', + arguments: { + config: JSON.stringify(config), + force: force ?? 
false + } + } + }; + + const response = await this.request<{ + result?: { + content: Array<{ type: string; text: string }>; + isError?: boolean; + }; + error?: { message: string }; + }>('/api/mcp', { + method: 'POST', + body: JSON.stringify(mcpRequest) + }); + + if (response.error) { + return { ok: false, errors: [response.error.message], synced: [] }; + } + + if (response.result?.isError) { + const errorText = response.result.content[0]?.text ?? 'Unknown error'; + try { + const parsed = JSON.parse(errorText) as { error?: string }; + return { ok: false, errors: [parsed.error ?? errorText], synced: [] }; + } catch { + return { ok: false, errors: [errorText], synced: [] }; + } + } + + const text = response.result?.content[0]?.text ?? '{}'; + try { + return JSON.parse(text) as SyncResult; + } catch { + return { ok: true, synced: [] }; + } + } + + /** + * Get instance status via the CLI API + */ + async getStatus(project?: string): Promise< + | { + ok: true; + instance: RemoteInstance; + project?: RemoteProject; + } + | { + ok: false; + error: string; + } + > { + try { + const result = await this.request<{ + instance: RemoteInstance; + project?: RemoteProject; + }>(`/api/cli/status${project ? 
`?project=${encodeURIComponent(project)}` : ''}`); + return { ok: true, ...result }; + } catch (error) { + if (error instanceof RemoteApiError) { + return { ok: false, error: error.message }; + } + throw error; + } + } + + /** + * Wake the sandbox via the CLI API + */ + async wake(): Promise< + | { + ok: true; + serverUrl: string; + } + | { + ok: false; + error: string; + } + > { + try { + const result = await this.request<{ serverUrl: string }>('/api/cli/wake', { + method: 'POST' + }); + return { ok: true, serverUrl: result.serverUrl }; + } catch (error) { + if (error instanceof RemoteApiError) { + return { ok: false, error: error.message }; + } + throw error; + } + } + + /** + * Get thread transcript via the CLI API + */ + async getThread(threadId: string): Promise< + | { + ok: true; + thread: RemoteThread; + messages: RemoteMessage[]; + } + | { + ok: false; + error: string; + } + > { + try { + const result = await this.request<{ + thread: RemoteThread; + messages: RemoteMessage[]; + }>(`/api/cli/threads/${threadId}`); + return { ok: true, ...result }; + } catch (error) { + if (error instanceof RemoteApiError) { + return { ok: false, error: error.message }; + } + throw error; + } + } + + /** + * List threads via the CLI API + */ + async listThreads(project?: string): Promise< + | { + ok: true; + threads: RemoteThread[]; + } + | { + ok: false; + error: string; + } + > { + try { + const result = await this.request<{ threads: RemoteThread[] }>( + `/api/cli/threads${project ? 
`?project=${encodeURIComponent(project)}` : ''}` + ); + return { ok: true, threads: result.threads }; + } catch (error) { + if (error instanceof RemoteApiError) { + return { ok: false, error: error.message }; + } + throw error; + } + } + + /** + * List MCP questions for a project via the CLI API + */ + async listQuestions(project: string): Promise< + | { + ok: true; + questions: McpQuestion[]; + } + | { + ok: false; + error: string; + } + > { + try { + const result = await this.request<{ questions: McpQuestion[] }>( + `/api/cli/questions?project=${encodeURIComponent(project)}` + ); + return { ok: true, questions: result.questions }; + } catch (error) { + if (error instanceof RemoteApiError) { + return { ok: false, error: error.message }; + } + throw error; + } + } + + /** + * List projects via the CLI API + */ + async listProjects(): Promise< + | { + ok: true; + projects: RemoteProject[]; + } + | { + ok: false; + error: string; + } + > { + try { + const result = await this.request<{ projects: RemoteProject[] }>('/api/cli/projects'); + return { ok: true, projects: result.projects }; + } catch (error) { + if (error instanceof RemoteApiError) { + return { ok: false, error: error.message }; + } + throw error; + } + } +} + +/** + * Create a remote client from stored auth + */ +export async function createRemoteClientFromAuth( + loadAuth: () => Promise<{ apiKey: string } | null> +): Promise { + const auth = await loadAuth(); + if (!auth) return null; + return new RemoteClient({ apiKey: auth.apiKey }); +} diff --git a/apps/cli/src/commands/add.ts b/apps/cli/src/commands/add.ts index e47a2d6f..233d23b8 100644 --- a/apps/cli/src/commands/add.ts +++ b/apps/cli/src/commands/add.ts @@ -1,16 +1,10 @@ import { Command } from 'commander'; import * as readline from 'readline'; import path from 'node:path'; -import os from 'node:os'; -import { promises as fs } from 'node:fs'; import { ensureServer } from '../server/manager.ts'; import { addResource, BtcaError } from 
'../client/index.ts'; import { dim } from '../lib/utils/colors.ts'; -const PROJECT_CONFIG_FILENAME = 'btca.config.jsonc'; -const GLOBAL_CONFIG_DIR = path.join(os.homedir(), '.config', 'btca'); -const GLOBAL_CONFIG_PATH = path.join(GLOBAL_CONFIG_DIR, PROJECT_CONFIG_FILENAME); - interface GitHubUrlParts { owner: string; repo: string; @@ -51,28 +45,6 @@ function normalizeGitHubUrl(url: string): string { return `https://github.com/${parts.owner}/${parts.repo}`; } -/** - * Check if a file exists. - */ -async function fileExists(filePath: string): Promise { - try { - await fs.access(filePath); - return true; - } catch { - return false; - } -} - -/** - * Get the config path based on -g flag. - */ -function getConfigPath(global: boolean): string { - if (global) { - return GLOBAL_CONFIG_PATH; - } - return path.join(process.cwd(), PROJECT_CONFIG_FILENAME); -} - /** * Format an error for display, including hint if available. */ @@ -142,121 +114,356 @@ async function promptRepeated(rl: readline.Interface, itemName: string): Promise return items; } -export const addCommand = new Command('add') - .description('Add a GitHub repository as a resource') - .argument('', 'GitHub repository URL (e.g., https://github.com/owner/repo)') - .option('-g, --global', 'Add to global config instead of project config') - .action(async (url: string, options: { global?: boolean }, command) => { - const globalOpts = command.parent?.opts() as { server?: string; port?: number } | undefined; - const configPath = getConfigPath(options.global ?? false); - - try { - // Validate GitHub URL - const urlParts = parseGitHubUrl(url); - if (!urlParts) { - console.error('Error: Invalid GitHub URL.'); - console.error('Expected format: https://github.com/owner/repo'); - process.exit(1); - } +/** + * Prompt for single selection from a list. 
+ */ +async function promptSelect( + question: string, + options: { label: string; value: T }[] +): Promise { + return new Promise((resolve, reject) => { + const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout + }); - // Check if config exists - if (!(await fileExists(configPath))) { - if (options.global) { - console.error(`Error: Global config not found at ${GLOBAL_CONFIG_PATH}`); - console.error('Run "btca init" first to create a configuration.'); - } else { - console.error(`Error: No ${PROJECT_CONFIG_FILENAME} found in current directory.`); - console.error('Run "btca init" first to create a project configuration.'); - } - process.exit(1); + console.log(`\n${question}\n`); + options.forEach((opt, idx) => { + console.log(` ${idx + 1}) ${opt.label}`); + }); + console.log(''); + + rl.question('Enter number: ', (answer) => { + rl.close(); + const num = parseInt(answer.trim(), 10); + if (isNaN(num) || num < 1 || num > options.length) { + reject(new Error('Invalid selection')); + return; } + resolve(options[num - 1]!.value); + }); + }); +} + +/** + * Interactive wizard for adding a git resource. 
+ */ +async function addGitResourceWizard( + url: string, + options: { global?: boolean }, + globalOpts: { server?: string; port?: number } | undefined +): Promise { + const urlParts = parseGitHubUrl(url); + if (!urlParts) { + console.error('Error: Invalid GitHub URL.'); + console.error('Expected format: https://github.com/owner/repo'); + process.exit(1); + } - const normalizedUrl = normalizeGitHubUrl(url); + const normalizedUrl = normalizeGitHubUrl(url); - // Start the interactive wizard - console.log('\n--- Add Resource Wizard ---\n'); - console.log(`Repository: ${normalizedUrl}`); + console.log('\n--- Add Git Resource ---\n'); + console.log(`Repository: ${normalizedUrl}`); - const rl = createRl(); + const rl = createRl(); + + try { + // Step 1: URL (prefilled, confirm) + const finalUrl = await promptInput(rl, 'URL', normalizedUrl); + + // Step 2: Name (default = repo name) + const defaultName = urlParts.repo; + const name = await promptInput(rl, 'Name', defaultName); + + // Step 3: Branch (default = main) + const branch = await promptInput(rl, 'Branch', 'main'); + + // Step 4: Search paths (optional, repeated) + const wantSearchPaths = await promptConfirm( + rl, + 'Do you want to add search paths (subdirectories to focus on)?' + ); + const searchPaths = wantSearchPaths ? await promptRepeated(rl, 'Search path') : []; + + // Step 5: Notes (optional) + const notes = await promptInput(rl, 'Notes (optional)'); + + rl.close(); + + // Summary + console.log('\n--- Summary ---\n'); + console.log(` Type: git`); + console.log(` Name: ${name}`); + console.log(` URL: ${finalUrl}`); + console.log(` Branch: ${branch}`); + if (searchPaths.length > 0) { + console.log(` Search: ${searchPaths.join(', ')}`); + } + if (notes) { + console.log(` Notes: ${notes}`); + } + console.log(` Config: ${options.global ? 
'global' : 'project'}`); + console.log(''); + + // Confirm + const confirmRl = createRl(); + const confirmed = await promptConfirm(confirmRl, 'Add this resource?'); + confirmRl.close(); + + if (!confirmed) { + console.log('\nCancelled.'); + process.exit(0); + } + + // Add the resource via server + const server = await ensureServer({ + serverUrl: globalOpts?.server, + port: globalOpts?.port, + quiet: true + }); + + const resource = await addResource(server.url, { + type: 'git', + name, + url: finalUrl, + branch, + ...(searchPaths.length === 1 && { searchPath: searchPaths[0] }), + ...(searchPaths.length > 1 && { searchPaths }), + ...(notes && { specialNotes: notes }) + }); + + server.stop(); + + console.log(`\nAdded resource: ${name}`); + if (resource.type === 'git' && resource.url !== finalUrl) { + console.log(` URL normalized: ${resource.url}`); + } + console.log('\nYou can now use this resource:'); + console.log(` btca ask -r ${name} -q "your question"`); + } catch (error) { + rl.close(); + throw error; + } +} + +/** + * Interactive wizard for adding a local resource. + */ +async function addLocalResourceWizard( + localPath: string, + options: { global?: boolean }, + globalOpts: { server?: string; port?: number } | undefined +): Promise { + // Resolve the path + const resolvedPath = path.isAbsolute(localPath) + ? 
localPath + : path.resolve(process.cwd(), localPath); + + console.log('\n--- Add Local Resource ---\n'); + console.log(`Directory: ${resolvedPath}`); + + const rl = createRl(); + + try { + // Step 1: Path (prefilled, confirm) + const finalPath = await promptInput(rl, 'Path', resolvedPath); + + // Step 2: Name (default = directory name) + const defaultName = path.basename(finalPath); + const name = await promptInput(rl, 'Name', defaultName); + + // Step 3: Notes (optional) + const notes = await promptInput(rl, 'Notes (optional)'); + + rl.close(); + + // Summary + console.log('\n--- Summary ---\n'); + console.log(` Type: local`); + console.log(` Name: ${name}`); + console.log(` Path: ${finalPath}`); + if (notes) { + console.log(` Notes: ${notes}`); + } + console.log(` Config: ${options.global ? 'global' : 'project'}`); + console.log(''); + + // Confirm + const confirmRl = createRl(); + const confirmed = await promptConfirm(confirmRl, 'Add this resource?'); + confirmRl.close(); + + if (!confirmed) { + console.log('\nCancelled.'); + process.exit(0); + } + + // Add the resource via server + const server = await ensureServer({ + serverUrl: globalOpts?.server, + port: globalOpts?.port, + quiet: true + }); + + await addResource(server.url, { + type: 'local', + name, + path: finalPath, + ...(notes && { specialNotes: notes }) + }); + + server.stop(); + + console.log(`\nAdded resource: ${name}`); + console.log('\nYou can now use this resource:'); + console.log(` btca ask -r ${name} -q "your question"`); + } catch (error) { + rl.close(); + throw error; + } +} + +export const addCommand = new Command('add') + .description('Add a resource (git repository or local directory)') + .argument('[url-or-path]', 'GitHub repository URL or local directory path') + .option('-g, --global', 'Add to global config instead of project config') + .option('-n, --name ', 'Resource name') + .option('-b, --branch ', 'Git branch (default: main)') + .option('-s, --search-path ', 'Search paths within 
repo (can specify multiple)') + .option('--notes ', 'Special notes for the agent') + .option('-t, --type ', 'Resource type: git or local (auto-detected if not specified)') + .action( + async ( + urlOrPath: string | undefined, + options: { + global?: boolean; + name?: string; + branch?: string; + searchPath?: string[]; + notes?: string; + type?: string; + }, + command + ) => { + const globalOpts = command.parent?.opts() as { server?: string; port?: number } | undefined; try { - // Step 1: URL (prefilled, confirm) - const finalUrl = await promptInput(rl, 'URL', normalizedUrl); - - // Step 2: Name (default = repo name) - const defaultName = urlParts.repo; - const name = await promptInput(rl, 'Name', defaultName); - - // Step 3: Branch (default = main) - const branch = await promptInput(rl, 'Branch', 'main'); - - // Step 4: Search paths (optional, repeated) - const wantSearchPaths = await promptConfirm( - rl, - 'Do you want to add search paths (subdirectories to focus on)?' - ); - const searchPaths = wantSearchPaths ? 
await promptRepeated(rl, 'Search path') : []; - - // Step 5: Notes (optional) - const notes = await promptInput(rl, 'Notes (optional)'); - - rl.close(); - - // Summary - console.log('\n--- Summary ---\n'); - console.log(` Name: ${name}`); - console.log(` URL: ${finalUrl}`); - console.log(` Branch: ${branch}`); - if (searchPaths.length > 0) { - console.log(` Search: ${searchPaths.join(', ')}`); + // If no argument provided, start interactive wizard + if (!urlOrPath) { + const resourceType = await promptSelect<'git' | 'local'>( + 'What type of resource do you want to add?', + [ + { label: 'Git repository', value: 'git' }, + { label: 'Local directory', value: 'local' } + ] + ); + + const rl = createRl(); + if (resourceType === 'git') { + const url = await promptInput(rl, 'GitHub URL'); + rl.close(); + if (!url) { + console.error('Error: URL is required.'); + process.exit(1); + } + await addGitResourceWizard(url, options, globalOpts); + } else { + const localPath = await promptInput(rl, 'Local path'); + rl.close(); + if (!localPath) { + console.error('Error: Path is required.'); + process.exit(1); + } + await addLocalResourceWizard(localPath, options, globalOpts); + } + return; } - if (notes) { - console.log(` Notes: ${notes}`); + + // Determine type from argument or explicit flag + let resourceType: 'git' | 'local' = 'git'; + + if (options.type) { + if (options.type !== 'git' && options.type !== 'local') { + console.error('Error: --type must be "git" or "local"'); + process.exit(1); + } + resourceType = options.type as 'git' | 'local'; + } else { + // Auto-detect: if it looks like a URL, it's git; otherwise local + const isUrl = + urlOrPath.startsWith('http://') || + urlOrPath.startsWith('https://') || + urlOrPath.startsWith('github.com/') || + urlOrPath.includes('github.com/'); + resourceType = isUrl ? 'git' : 'local'; } - console.log(` Config: ${options.global ? 
'global' : 'project'}`); - console.log(''); - // Confirm - const confirmRl = createRl(); - const confirmed = await promptConfirm(confirmRl, 'Add this resource?'); - confirmRl.close(); + // If all required options provided via flags, skip wizard + if (options.name && resourceType === 'git' && parseGitHubUrl(urlOrPath)) { + // Non-interactive git add + const normalizedUrl = normalizeGitHubUrl(urlOrPath); + const server = await ensureServer({ + serverUrl: globalOpts?.server, + port: globalOpts?.port, + quiet: true + }); + + const searchPaths = options.searchPath ?? []; + const resource = await addResource(server.url, { + type: 'git', + name: options.name, + url: normalizedUrl, + branch: options.branch ?? 'main', + ...(searchPaths.length === 1 && { searchPath: searchPaths[0] }), + ...(searchPaths.length > 1 && { searchPaths }), + ...(options.notes && { specialNotes: options.notes }) + }); + + server.stop(); + + console.log(`Added git resource: ${options.name}`); + if (resource.type === 'git' && resource.url !== normalizedUrl) { + console.log(` URL normalized: ${resource.url}`); + } + return; + } - if (!confirmed) { - console.log('\nCancelled.'); - process.exit(0); + if (options.name && resourceType === 'local') { + // Non-interactive local add + const resolvedPath = path.isAbsolute(urlOrPath) + ? 
urlOrPath + : path.resolve(process.cwd(), urlOrPath); + const server = await ensureServer({ + serverUrl: globalOpts?.server, + port: globalOpts?.port, + quiet: true + }); + + await addResource(server.url, { + type: 'local', + name: options.name, + path: resolvedPath, + ...(options.notes && { specialNotes: options.notes }) + }); + + server.stop(); + console.log(`Added local resource: ${options.name}`); + return; } - // Add the resource via server - const server = await ensureServer({ - serverUrl: globalOpts?.server, - port: globalOpts?.port, - quiet: true - }); - - const resource = await addResource(server.url, { - type: 'git', - name, - url: finalUrl, - branch, - ...(searchPaths.length === 1 && { searchPath: searchPaths[0] }), - ...(searchPaths.length > 1 && { searchPaths }), - ...(notes && { specialNotes: notes }) - }); - - server.stop(); - - console.log(`\nAdded resource: ${name}`); - if (resource.type === 'git' && resource.url !== finalUrl) { - console.log(` URL normalized: ${resource.url}`); + // Interactive wizard based on type + if (resourceType === 'git') { + await addGitResourceWizard(urlOrPath, options, globalOpts); + } else { + await addLocalResourceWizard(urlOrPath, options, globalOpts); } - console.log('\nYou can now use this resource:'); - console.log(` btca ask -r ${name} -q "your question"`); } catch (error) { - rl.close(); - throw error; + if (error instanceof Error && error.message === 'Invalid selection') { + console.error('\nError: Invalid selection. 
Please try again.'); + process.exit(1); + } + console.error(formatError(error)); + process.exit(1); } - } catch (error) { - console.error(formatError(error)); - process.exit(1); } - }); + ); diff --git a/apps/cli/src/commands/connect.ts b/apps/cli/src/commands/connect.ts new file mode 100644 index 00000000..0ba0b44d --- /dev/null +++ b/apps/cli/src/commands/connect.ts @@ -0,0 +1,284 @@ +import { Command } from 'commander'; +import * as readline from 'readline'; +import { spawn } from 'bun'; +import { ensureServer } from '../server/manager.ts'; +import { createClient, getProviders, updateModel, BtcaError } from '../client/index.ts'; +import { dim, green } from '../lib/utils/colors.ts'; + +// Recommended models for quick selection +const RECOMMENDED_MODELS = [ + { provider: 'opencode', model: 'claude-haiku-4-5', label: 'Claude Haiku 4.5 (fast, cheap)' }, + { provider: 'opencode', model: 'claude-sonnet-4', label: 'Claude Sonnet 4 (balanced)' }, + { provider: 'opencode', model: 'claude-sonnet-4-5', label: 'Claude Sonnet 4.5 (powerful)' }, + { provider: 'opencode', model: 'gpt-5.1', label: 'GPT 5.1 (balanced)' }, + { provider: 'opencode', model: 'gpt-5.2', label: 'GPT 5.2 (latest)' }, + { provider: 'opencode', model: 'gemini-3-flash', label: 'Gemini 3 Flash (fast)' } +]; + +// Provider display info +const PROVIDER_INFO: Record = { + opencode: { label: 'OpenCode Zen (free tier available)', requiresAuth: false }, + anthropic: { label: 'Anthropic (Claude)', requiresAuth: true }, + openai: { label: 'OpenAI (GPT)', requiresAuth: true }, + google: { label: 'Google (Gemini)', requiresAuth: true }, + 'google-vertex': { label: 'Google Vertex AI', requiresAuth: true }, + 'amazon-bedrock': { label: 'Amazon Bedrock', requiresAuth: true }, + azure: { label: 'Azure OpenAI', requiresAuth: true }, + groq: { label: 'Groq', requiresAuth: true }, + mistral: { label: 'Mistral', requiresAuth: true }, + xai: { label: 'xAI (Grok)', requiresAuth: true }, + cohere: { label: 'Cohere', 
requiresAuth: true }, + deepinfra: { label: 'DeepInfra', requiresAuth: true }, + cerebras: { label: 'Cerebras', requiresAuth: true }, + perplexity: { label: 'Perplexity', requiresAuth: true }, + togetherai: { label: 'Together AI', requiresAuth: true } +}; + +/** + * Format an error for display, including hint if available. + */ +function formatError(error: unknown): string { + if (error instanceof BtcaError) { + let output = `Error: ${error.message}`; + if (error.hint) { + output += `\n\nHint: ${error.hint}`; + } + return output; + } + return `Error: ${error instanceof Error ? error.message : String(error)}`; +} + +/** + * Create a readline interface for prompts. + */ +function createRl(): readline.Interface { + return readline.createInterface({ + input: process.stdin, + output: process.stdout + }); +} + +/** + * Prompt for input with a default value. + */ +async function promptInput( + rl: readline.Interface, + question: string, + defaultValue?: string +): Promise { + return new Promise((resolve) => { + const defaultHint = defaultValue ? ` ${dim(`(${defaultValue})`)}` : ''; + rl.question(`${question}${defaultHint}: `, (answer) => { + const value = answer.trim(); + resolve(value || defaultValue || ''); + }); + }); +} + +/** + * Prompt for single selection from a list. + */ +async function promptSelect( + question: string, + options: { label: string; value: T }[] +): Promise { + return new Promise((resolve, reject) => { + const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout + }); + + console.log(`\n${question}\n`); + options.forEach((opt, idx) => { + console.log(` ${idx + 1}) ${opt.label}`); + }); + console.log(''); + + rl.question('Enter number: ', (answer) => { + rl.close(); + const num = parseInt(answer.trim(), 10); + if (isNaN(num) || num < 1 || num > options.length) { + reject(new Error('Invalid selection')); + return; + } + resolve(options[num - 1]!.value); + }); + }); +} + +/** + * Run opencode auth flow for a provider. 
+ */ +async function runOpencodeAuth(providerId: string): Promise { + console.log(`\nOpening browser for ${providerId} authentication...`); + console.log('(This requires OpenCode CLI to be installed)\n'); + + try { + const proc = spawn(['opencode', 'auth', '--provider', providerId], { + stdin: 'inherit', + stdout: 'inherit', + stderr: 'inherit' + }); + + const exitCode = await proc.exited; + return exitCode === 0; + } catch (error) { + console.error( + 'Failed to run opencode auth:', + error instanceof Error ? error.message : String(error) + ); + console.error('\nMake sure OpenCode CLI is installed: npm install -g opencode'); + return false; + } +} + +export const connectCommand = new Command('connect') + .description('Configure the AI provider and model') + .option('-g, --global', 'Save to global config instead of project config') + .option('-p, --provider ', 'Provider ID (e.g., "opencode", "anthropic")') + .option('-m, --model ', 'Model ID (e.g., "claude-haiku-4-5")') + .action(async (options: { global?: boolean; provider?: string; model?: string }, command) => { + const globalOpts = command.parent?.opts() as { server?: string; port?: number } | undefined; + + try { + const server = await ensureServer({ + serverUrl: globalOpts?.server, + port: globalOpts?.port, + quiet: true + }); + + const client = createClient(server.url); + const providers = await getProviders(client); + + // If both provider and model specified via flags, just set them + if (options.provider && options.model) { + const result = await updateModel(server.url, options.provider, options.model); + console.log(`Model updated: ${result.provider}/${result.model}`); + + // Warn if provider not connected + if (options.provider !== 'opencode' && !providers.connected.includes(options.provider)) { + console.warn(`\nWarning: Provider "${options.provider}" is not connected.`); + console.warn('Run "opencode auth" to configure credentials.'); + } + + server.stop(); + return; + } + + // Interactive mode + 
console.log('\n--- Configure AI Provider ---\n'); + + // Step 1: Choose between quick setup or custom + const setupMode = await promptSelect<'quick' | 'custom'>('How would you like to configure?', [ + { label: 'Quick setup (recommended models)', value: 'quick' }, + { label: 'Custom (choose provider and model)', value: 'custom' } + ]); + + let provider: string; + let model: string; + + if (setupMode === 'quick') { + // Show recommended models + const modelChoice = await promptSelect( + 'Select a model:', + RECOMMENDED_MODELS.map((m) => ({ + label: `${m.label}`, + value: `${m.provider}:${m.model}` + })) + ); + + const [p, m] = modelChoice.split(':'); + provider = p!; + model = m!; + } else { + // Custom setup - choose provider first + const providerOptions: { label: string; value: string }[] = []; + + // Add connected providers first + for (const connectedId of providers.connected) { + const info = PROVIDER_INFO[connectedId]; + const label = info + ? `${info.label} ${green('(connected)')}` + : `${connectedId} ${green('(connected)')}`; + providerOptions.push({ label, value: connectedId }); + } + + // Add unconnected providers + for (const p of providers.all) { + if (!providers.connected.includes(p.id)) { + const info = PROVIDER_INFO[p.id]; + const label = info ? 
info.label : p.id; + providerOptions.push({ label, value: p.id }); + } + } + + provider = await promptSelect('Select a provider:', providerOptions); + + // Check if provider needs authentication + const isConnected = providers.connected.includes(provider); + const info = PROVIDER_INFO[provider]; + + if (!isConnected && info?.requiresAuth) { + console.log(`\nProvider "${provider}" requires authentication.`); + const shouldAuth = await promptSelect<'yes' | 'no'>( + 'Would you like to authenticate now?', + [ + { label: 'Yes, authenticate now', value: 'yes' }, + { label: "No, I'll do it later", value: 'no' } + ] + ); + + if (shouldAuth === 'yes') { + const success = await runOpencodeAuth(provider); + if (!success) { + console.warn( + '\nAuthentication may have failed. You can try again later with: opencode auth' + ); + } + } else { + console.warn(`\nNote: You'll need to authenticate before using this provider.`); + console.warn('Run: opencode auth --provider ' + provider); + } + } + + // Get model from user + const rl = createRl(); + + // Show available models if we know them + const providerInfo = providers.all.find((p) => p.id === provider); + if (providerInfo?.models && Object.keys(providerInfo.models).length > 0) { + const modelIds = Object.keys(providerInfo.models); + console.log(`\nAvailable models for ${provider}:`); + modelIds.slice(0, 10).forEach((id) => console.log(` - ${id}`)); + if (modelIds.length > 10) { + console.log(` ... and ${modelIds.length - 10} more`); + } + } + + model = await promptInput(rl, 'Enter model ID'); + rl.close(); + + if (!model) { + console.error('Error: Model ID is required.'); + server.stop(); + process.exit(1); + } + } + + // Update the model + const result = await updateModel(server.url, provider, model); + console.log(`\nModel configured: ${result.provider}/${result.model}`); + + // Show where it was saved + console.log(`\nSaved to: ${options.global ? 
'global' : 'project'} config`); + + server.stop(); + } catch (error) { + if (error instanceof Error && error.message === 'Invalid selection') { + console.error('\nError: Invalid selection. Please try again.'); + process.exit(1); + } + console.error(formatError(error)); + process.exit(1); + } + }); diff --git a/apps/cli/src/commands/init.ts b/apps/cli/src/commands/init.ts index 05fc454c..a519a739 100644 --- a/apps/cli/src/commands/init.ts +++ b/apps/cli/src/commands/init.ts @@ -4,10 +4,10 @@ import path from 'node:path'; import * as readline from 'readline'; const PROJECT_CONFIG_FILENAME = 'btca.config.jsonc'; -const CONFIG_SCHEMA_URL = 'https://btca.schema.json'; +const CONFIG_SCHEMA_URL = 'https://btca.dev/btca.schema.json'; const DEFAULT_MODEL = 'claude-haiku-4-5'; const DEFAULT_PROVIDER = 'opencode'; -const MCP_DASHBOARD_URL = 'https://btca.dev/app/settings/mcp/'; +const MCP_DASHBOARD_URL = 'https://btca.dev/app/settings'; // AGENTS.md section templates const MCP_AGENTS_SECTION = `## Better Context MCP @@ -27,15 +27,13 @@ Use Better Context MCP for documentation/resource questions when you need source **Common errors** - "Invalid resources" → re-run \`listResources\` and use exact names. - "Instance is provisioning / error state" → wait or retry after a minute. -- "Missing or invalid Authorization header" → MCP auth is invalid; fix it in \`https://btca.dev/app/settings/mcp/\`. +- "Missing or invalid Authorization header" → MCP auth is invalid; fix it in \`https://btca.dev/app/settings/\`. `; const CLI_AGENTS_SECTION = `## btca When you need up-to-date information about technologies used in this project, use btca to query source repositories directly. -**Available resources**: (run \`btca config resources list\` to see configured resources) - ### Usage Ask a question about one or more resources: @@ -59,26 +57,34 @@ btca ask --question "@svelte @tailwind How do I style components?" 
### Interactive Mode -Start a chat session for deeper exploration: +Launch the TUI for interactive chat: \`\`\`bash -btca chat --resource svelte --resource effect +btca \`\`\` -Or use the TUI: +Then use \`@mentions\` to reference resources (e.g., "@svelte How do I create a store?") + +### Managing Resources \`\`\`bash -btca -\`\`\` +# Add a git resource +btca add https://github.com/owner/repo -Then use \`@mentions\` to reference resources (e.g., "@svelte How do I create a store?") +# Add a local directory +btca add ./docs + +# Remove a resource +btca remove +\`\`\` ### Configuration -This project's btca resources are configured in \`btca.config.jsonc\` at the project root. To modify: +This project's btca resources are configured in \`btca.config.jsonc\` at the project root. To change the AI model: -- Edit the config file directly, or -- Use \`btca config resources add/remove\` commands +\`\`\`bash +btca connect +\`\`\` `; type SetupType = 'mcp' | 'cli'; @@ -343,7 +349,7 @@ async function handleCliSetup(cwd: string, configPath: string, force?: boolean): console.log('\n--- Setup Complete (CLI) ---\n'); console.log('Next steps:'); console.log(' 1. Add resources: btca add https://github.com/owner/repo'); - console.log(' Or: btca config resources add -n -t git -u '); console.log(' 2. Ask a question: btca ask -r -q "your question"'); + console.log(' 3. 
Or launch the TUI: btca'); console.log("\nRun 'btca --help' for more options."); } diff --git a/apps/cli/src/commands/remote.ts b/apps/cli/src/commands/remote.ts new file mode 100644 index 00000000..53e868d1 --- /dev/null +++ b/apps/cli/src/commands/remote.ts @@ -0,0 +1,890 @@ +import { Command } from 'commander'; +import * as readline from 'readline'; +import { + RemoteClient, + RemoteApiError, + type GitResource, + type RemoteConfig +} from '../client/remote.ts'; +import { dim, green, red, yellow, bold } from '../lib/utils/colors.ts'; + +// ───────────────────────────────────────────────────────────────────────────── +// Config Constants (duplicated to avoid server import) +// ───────────────────────────────────────────────────────────────────────────── + +const GLOBAL_CONFIG_DIR = '~/.config/btca'; +const REMOTE_AUTH_FILENAME = 'remote-auth.json'; +const REMOTE_CONFIG_FILENAME = 'btca.remote.config.jsonc'; +const REMOTE_CONFIG_SCHEMA_URL = 'https://btca.dev/btca.remote.schema.json'; + +const expandHome = (filePath: string): string => { + const home = process.env.HOME ?? process.env.USERPROFILE ?? 
''; + if (filePath.startsWith('~/')) return home + filePath.slice(1); + return filePath; +}; + +// ───────────────────────────────────────────────────────────────────────────── +// Auth Helpers +// ───────────────────────────────────────────────────────────────────────────── + +interface RemoteAuth { + apiKey: string; + linkedAt: number; +} + +async function getAuthPath(): Promise { + return `${expandHome(GLOBAL_CONFIG_DIR)}/${REMOTE_AUTH_FILENAME}`; +} + +async function loadAuth(): Promise { + const authPath = await getAuthPath(); + try { + const content = await Bun.file(authPath).text(); + return JSON.parse(content) as RemoteAuth; + } catch { + return null; + } +} + +async function saveAuth(auth: RemoteAuth): Promise { + const authPath = await getAuthPath(); + const configDir = authPath.slice(0, authPath.lastIndexOf('/')); + + await Bun.write(`${configDir}/.keep`, ''); + await Bun.write(authPath, JSON.stringify(auth, null, 2)); +} + +async function deleteAuth(): Promise { + const authPath = await getAuthPath(); + try { + const fs = await import('node:fs/promises'); + await fs.unlink(authPath); + } catch { + // Ignore if file doesn't exist + } +} + +async function requireAuth(): Promise { + const auth = await loadAuth(); + if (!auth) { + console.error(red('Not authenticated with remote.')); + console.error(`Run ${bold('btca remote link')} to authenticate.`); + process.exit(1); + } + return new RemoteClient({ apiKey: auth.apiKey }); +} + +// ───────────────────────────────────────────────────────────────────────────── +// Remote Config Helpers +// ───────────────────────────────────────────────────────────────────────────── + +function getConfigPath(cwd: string = process.cwd()): string { + return `${cwd}/${REMOTE_CONFIG_FILENAME}`; +} + +async function loadConfig(cwd: string = process.cwd()): Promise { + const configPath = getConfigPath(cwd); + try { + const content = await Bun.file(configPath).text(); + const stripped = stripJsonComments(content); + return 
JSON.parse(stripped) as RemoteConfig; + } catch { + return null; + } +} + +function stripJsonComments(content: string): string { + let result = ''; + let inString = false; + let inLineComment = false; + let inBlockComment = false; + let i = 0; + + while (i < content.length) { + const char = content[i]; + const next = content[i + 1]; + + if (inLineComment) { + if (char === '\n') { + inLineComment = false; + result += char; + } + i++; + continue; + } + + if (inBlockComment) { + if (char === '*' && next === '/') { + inBlockComment = false; + i += 2; + continue; + } + i++; + continue; + } + + if (inString) { + result += char; + if (char === '\\' && i + 1 < content.length) { + result += content[i + 1]; + i += 2; + continue; + } + if (char === '"') { + inString = false; + } + i++; + continue; + } + + if (char === '"') { + inString = true; + result += char; + i++; + continue; + } + + if (char === '/' && next === '/') { + inLineComment = true; + i += 2; + continue; + } + + if (char === '/' && next === '*') { + inBlockComment = true; + i += 2; + continue; + } + + result += char; + i++; + } + + return result.replace(/,(\s*[}\]])/g, '$1'); +} + +async function saveConfig(config: RemoteConfig, cwd: string = process.cwd()): Promise { + const configPath = getConfigPath(cwd); + const toSave = { + $schema: REMOTE_CONFIG_SCHEMA_URL, + ...config + }; + await Bun.write(configPath, JSON.stringify(toSave, null, '\t')); +} + +// ───────────────────────────────────────────────────────────────────────────── +// Prompt Helpers +// ───────────────────────────────────────────────────────────────────────────── + +function createRl(): readline.Interface { + return readline.createInterface({ + input: process.stdin, + output: process.stdout + }); +} + +async function promptInput( + rl: readline.Interface, + question: string, + defaultValue?: string +): Promise { + return new Promise((resolve) => { + const defaultHint = defaultValue ? 
` ${dim(`(${defaultValue})`)}` : ''; + rl.question(`${question}${defaultHint}: `, (answer) => { + const value = answer.trim(); + resolve(value || defaultValue || ''); + }); + }); +} + +async function promptConfirm(rl: readline.Interface, question: string): Promise { + return new Promise((resolve) => { + rl.question(`${question} ${dim('(y/n)')}: `, (answer) => { + resolve(answer.trim().toLowerCase() === 'y'); + }); + }); +} + +// ───────────────────────────────────────────────────────────────────────────── +// Error Formatting +// ───────────────────────────────────────────────────────────────────────────── + +function formatError(error: unknown): string { + if (error instanceof RemoteApiError) { + let output = `Error: ${error.message}`; + if (error.hint) { + output += `\n\nHint: ${error.hint}`; + } + return output; + } + return `Error: ${error instanceof Error ? error.message : String(error)}`; +} + +// ───────────────────────────────────────────────────────────────────────────── +// Subcommands +// ───────────────────────────────────────────────────────────────────────────── + +/** + * btca remote link - Authenticate with remote instance + */ +const linkCommand = new Command('link') + .description('Authenticate with the btca cloud service') + .option('--key ', 'API key (if you have one already)') + .action(async (options: { key?: string }) => { + try { + const existingAuth = await loadAuth(); + if (existingAuth) { + const rl = createRl(); + const overwrite = await promptConfirm( + rl, + 'You are already authenticated. Do you want to re-authenticate?' + ); + rl.close(); + if (!overwrite) { + console.log('Cancelled.'); + return; + } + } + + let apiKey = options.key; + + if (!apiKey) { + console.log('\n--- btca Remote Authentication ---\n'); + console.log('To authenticate, you need an API key from the btca web app.'); + console.log(`\n1. Go to ${bold('https://btca.dev/app/settings?tab=mcp')}`); + console.log('2. Create a new API key'); + console.log('3. 
Copy the key and paste it below\n'); + + const rl = createRl(); + apiKey = await promptInput(rl, 'API Key'); + rl.close(); + + if (!apiKey) { + console.error(red('API key is required.')); + process.exit(1); + } + } + + // Validate the API key + console.log('\nValidating API key...'); + const client = new RemoteClient({ apiKey }); + const validation = await client.validate(); + + if (!validation.valid) { + console.error(red(`\nAuthentication failed: ${validation.error}`)); + process.exit(1); + } + + // Save the auth + await saveAuth({ + apiKey, + linkedAt: Date.now() + }); + + console.log(green('\nAuthentication successful!')); + console.log(`\nYou can now use remote commands:`); + console.log(` ${dim('btca remote status')} - Check instance status`); + console.log(` ${dim('btca remote ask')} - Ask questions via cloud`); + console.log(` ${dim('btca remote sync')} - Sync local config with cloud`); + } catch (error) { + console.error(formatError(error)); + process.exit(1); + } + }); + +/** + * btca remote unlink - Remove authentication + */ +const unlinkCommand = new Command('unlink') + .description('Remove authentication with the btca cloud service') + .action(async () => { + try { + const auth = await loadAuth(); + if (!auth) { + console.log('Not currently authenticated.'); + return; + } + + await deleteAuth(); + console.log(green('Successfully unlinked from btca cloud.')); + } catch (error) { + console.error(formatError(error)); + process.exit(1); + } + }); + +/** + * btca remote status - Show sandbox status + */ +const statusCommand = new Command('status') + .description('Show sandbox and project status') + .action(async () => { + try { + const client = await requireAuth(); + const config = await loadConfig(); + + console.log('\n--- btca Remote Status ---\n'); + + const result = await client.getStatus(config?.project); + + if (!result.ok) { + console.error(red(`Error: ${result.error}`)); + process.exit(1); + } + + const { instance, project } = result; + + // 
Instance status + const stateColors: Record string> = { + running: green, + stopped: yellow, + error: red, + provisioning: yellow, + starting: yellow, + stopping: yellow + }; + const stateColor = stateColors[instance.state] ?? dim; + console.log(`Sandbox: ${stateColor(instance.state)}`); + + if (instance.subscriptionPlan) { + console.log(`Plan: ${instance.subscriptionPlan}`); + } + + if (instance.btcaVersion) { + console.log(`Version: ${instance.btcaVersion}`); + } + + // Project info + if (project) { + console.log(`\nProject: ${bold(project.name)}${project.isDefault ? ' (default)' : ''}`); + if (project.model) { + console.log(`Model: ${project.model}`); + } + } else if (config?.project) { + console.log(`\nLocal project: ${bold(config.project)} (not synced)`); + } + + // Local config info + if (config) { + console.log(`\nLocal resources: ${config.resources.length}`); + } else { + console.log(dim(`\nNo local remote config found (${REMOTE_CONFIG_FILENAME})`)); + } + + console.log(''); + } catch (error) { + console.error(formatError(error)); + process.exit(1); + } + }); + +/** + * btca remote wake - Pre-warm the sandbox + */ +const wakeCommand = new Command('wake') + .description('Pre-warm the cloud sandbox') + .action(async () => { + try { + const client = await requireAuth(); + + console.log('Waking sandbox...'); + const result = await client.wake(); + + if (!result.ok) { + console.error(red(`Error: ${result.error}`)); + process.exit(1); + } + + console.log(green('Sandbox is ready!')); + } catch (error) { + console.error(formatError(error)); + process.exit(1); + } + }); + +interface GitHubUrlParts { + owner: string; + repo: string; +} + +function parseGitHubUrl(url: string): GitHubUrlParts | null { + const patterns = [ + /^https?:\/\/github\.com\/([^/]+)\/([^/]+?)(\.git)?$/, + /^github\.com\/([^/]+)\/([^/]+?)(\.git)?$/ + ]; + + for (const pattern of patterns) { + const match = url.match(pattern); + if (match) { + return { owner: match[1]!, repo: match[2]! 
}; + } + } + + return null; +} + +function normalizeGitHubUrl(url: string): string { + const parts = parseGitHubUrl(url); + if (!parts) return url; + return `https://github.com/${parts.owner}/${parts.repo}`; +} + +async function promptRepeated(rl: readline.Interface, itemName: string): Promise { + const items: string[] = []; + + console.log(`\nEnter ${itemName} one at a time. Press Enter with empty input when done.`); + + while (true) { + const value = await promptInput(rl, ` ${itemName} ${items.length + 1}`); + if (!value) break; + items.push(value); + } + + return items; +} + +async function addRemoteResourceWizard(url: string): Promise { + const urlParts = parseGitHubUrl(url); + if (!urlParts) { + console.error(red('Invalid GitHub URL.')); + console.error('Expected format: https://github.com/owner/repo'); + process.exit(1); + } + + const normalizedUrl = normalizeGitHubUrl(url); + + console.log('\n--- Add Remote Resource ---\n'); + console.log(`Repository: ${normalizedUrl}`); + + const rl = createRl(); + + try { + const finalUrl = await promptInput(rl, 'URL', normalizedUrl); + + const defaultName = urlParts.repo; + const name = await promptInput(rl, 'Name', defaultName); + + const branch = await promptInput(rl, 'Branch', 'main'); + + const wantSearchPaths = await promptConfirm( + rl, + 'Do you want to add search paths (subdirectories to focus on)?' + ); + const searchPaths = wantSearchPaths ? 
await promptRepeated(rl, 'Search path') : []; + + const notes = await promptInput(rl, 'Notes (optional)'); + + rl.close(); + + console.log('\n--- Summary ---\n'); + console.log(` Type: git`); + console.log(` Name: ${name}`); + console.log(` URL: ${finalUrl}`); + console.log(` Branch: ${branch}`); + if (searchPaths.length > 0) { + console.log(` Search: ${searchPaths.join(', ')}`); + } + if (notes) { + console.log(` Notes: ${notes}`); + } + console.log(''); + + const confirmRl = createRl(); + const confirmed = await promptConfirm(confirmRl, 'Add this resource?'); + confirmRl.close(); + + if (!confirmed) { + console.log('\nCancelled.'); + process.exit(0); + } + + const client = await requireAuth(); + let config = await loadConfig(); + + if (!config) { + const projectRl = createRl(); + const projectName = await promptInput(projectRl, 'Project name for remote config'); + projectRl.close(); + + if (!projectName) { + console.error(red('Project name is required.')); + process.exit(1); + } + + config = { + project: projectName, + model: 'claude-sonnet', + resources: [] + }; + } + + const resource: GitResource = { + type: 'git', + name, + url: finalUrl, + branch, + ...(searchPaths.length === 1 && { searchPath: searchPaths[0] }), + ...(searchPaths.length > 1 && { searchPaths }), + ...(notes && { specialNotes: notes }) + }; + + if (config.resources.some((r) => r.name === name)) { + console.error(red(`Resource "${name}" already exists in config.`)); + process.exit(1); + } + + config.resources.push(resource); + await saveConfig(config); + + console.log(`\nAdded "${name}" to local config.`); + + console.log('Syncing to cloud...'); + const syncResult = await client.addResource(resource, config.project); + + if (!syncResult.ok) { + console.error(yellow(`Warning: Failed to sync to cloud: ${syncResult.error}`)); + console.error('The resource has been added to your local config.'); + console.error(`Run ${bold('btca remote sync')} to try again.`); + } else { + 
console.log(green(`Successfully added and synced "${name}"!`)); + } + + console.log('\nYou can now use this resource:'); + console.log(` ${dim(`btca remote ask -q "your question" -r ${name}`)}`); + } catch (error) { + rl.close(); + throw error; + } +} + +/** + * btca remote add - Add resource to remote config and sync + */ +const addCommand = new Command('add') + .description('Add a resource to remote config and sync to cloud') + .argument('[url]', 'GitHub repository URL') + .option('-n, --name ', 'Resource name') + .option('-b, --branch ', 'Git branch (default: main)') + .option('-s, --search-path ', 'Search paths within repo') + .option('--notes ', 'Special notes for the agent') + .action( + async ( + url: string | undefined, + options: { + name?: string; + branch?: string; + searchPath?: string[]; + notes?: string; + } + ) => { + try { + if (!url) { + const rl = createRl(); + const inputUrl = await promptInput(rl, 'GitHub URL'); + rl.close(); + + if (!inputUrl) { + console.error(red('URL is required.')); + process.exit(1); + } + + await addRemoteResourceWizard(inputUrl); + return; + } + + const urlParts = parseGitHubUrl(url); + if (!urlParts) { + console.error(red('Invalid GitHub URL.')); + console.error('Expected format: https://github.com/owner/repo'); + process.exit(1); + } + + if (options.name) { + const normalizedUrl = normalizeGitHubUrl(url); + const client = await requireAuth(); + let config = await loadConfig(); + + if (!config) { + const rl = createRl(); + const projectName = await promptInput(rl, 'Project name for remote config'); + rl.close(); + + if (!projectName) { + console.error(red('Project name is required.')); + process.exit(1); + } + + config = { + project: projectName, + model: 'claude-sonnet', + resources: [] + }; + } + + const resource: GitResource = { + type: 'git', + name: options.name, + url: normalizedUrl, + branch: options.branch ?? 
'main', + ...(options.searchPath?.length === 1 && { searchPath: options.searchPath[0] }), + ...(options.searchPath && + options.searchPath.length > 1 && { searchPaths: options.searchPath }), + ...(options.notes && { specialNotes: options.notes }) + }; + + if (config.resources.some((r) => r.name === options.name)) { + console.error(red(`Resource "${options.name}" already exists in config.`)); + process.exit(1); + } + + config.resources.push(resource); + await saveConfig(config); + + console.log(`Added "${options.name}" to local config.`); + + console.log('Syncing to cloud...'); + const syncResult = await client.addResource(resource, config.project); + + if (!syncResult.ok) { + console.error(yellow(`Warning: Failed to sync to cloud: ${syncResult.error}`)); + console.error('The resource has been added to your local config.'); + console.error(`Run ${bold('btca remote sync')} to try again.`); + } else { + console.log(green(`Successfully added and synced "${options.name}"!`)); + } + return; + } + + await addRemoteResourceWizard(url); + } catch (error) { + console.error(formatError(error)); + process.exit(1); + } + } + ); + +/** + * btca remote sync - Sync local config with cloud + */ +const syncCommand = new Command('sync') + .description('Sync local remote config with cloud') + .option('--force', 'Force push local config, overwriting cloud on conflicts') + .action(async (options: { force?: boolean }) => { + try { + const client = await requireAuth(); + const config = await loadConfig(); + + if (!config) { + console.error(red(`No remote config found (${REMOTE_CONFIG_FILENAME}).`)); + console.error('Create a remote config first or use `btca remote add` to start.'); + process.exit(1); + } + + console.log(`Syncing project "${config.project}"...`); + + const result = await client.sync(config, options.force); + + if (!result.ok) { + if (result.conflicts && result.conflicts.length > 0) { + console.error(red('\nConflicts detected:')); + for (const conflict of result.conflicts) 
{ + console.error(`\n ${bold(conflict.name)}:`); + console.error(` Local: ${conflict.local.url} @ ${conflict.local.branch}`); + console.error(` Remote: ${conflict.remote.url} @ ${conflict.remote.branch}`); + } + console.error( + `\nUse ${bold('--force')} to overwrite cloud config, or update local config to match.` + ); + } else if (result.errors) { + for (const err of result.errors) { + console.error(red(`Error: ${err}`)); + } + } + process.exit(1); + } + + if (result.synced.length > 0) { + console.log(green('\nSynced resources:')); + for (const name of result.synced) { + console.log(` - ${name}`); + } + } else { + console.log(green('\nAlready in sync!')); + } + } catch (error) { + console.error(formatError(error)); + process.exit(1); + } + }); + +/** + * btca remote ask - Ask a question via cloud + */ +const askCommand = new Command('ask') + .description('Ask a question via the cloud sandbox') + .requiredOption('-q, --question ', 'Question to ask') + .option('-r, --resource ', 'Resources to query') + .action(async (options: { question: string; resource?: string[] }) => { + try { + const client = await requireAuth(); + const config = await loadConfig(); + + // Get available resources + const resourcesResult = await client.listResources(config?.project); + if (!resourcesResult.ok) { + console.error(red(`Error: ${resourcesResult.error}`)); + process.exit(1); + } + + const available = resourcesResult.resources; + if (available.length === 0) { + console.error(red('No resources available.')); + console.error('Add resources first with `btca remote add`.'); + process.exit(1); + } + + // Determine which resources to use + let resources: string[]; + if (options.resource && options.resource.length > 0) { + // Validate requested resources + const invalid = options.resource.filter( + (r) => !available.some((a) => a.name.toLowerCase() === r.toLowerCase()) + ); + if (invalid.length > 0) { + console.error(red(`Invalid resources: ${invalid.join(', ')}`)); + 
console.error(`Available: ${available.map((a) => a.name).join(', ')}`); + process.exit(1); + } + resources = options.resource; + } else { + // Use all available resources + resources = available.map((a) => a.name); + } + + console.log('Asking...\n'); + + const result = await client.ask(options.question, resources, config?.project); + + if (!result.ok) { + console.error(red(`Error: ${result.error}`)); + process.exit(1); + } + + console.log(result.text); + console.log(''); + } catch (error) { + console.error(formatError(error)); + process.exit(1); + } + }); + +/** + * btca remote grab - Output thread transcript + */ +const grabCommand = new Command('grab') + .description('Output the full transcript of a thread') + .argument('', 'Thread ID to fetch') + .option('--json', 'Output as JSON') + .option('--markdown', 'Output as markdown (default)') + .action(async (threadId: string, options: { json?: boolean; markdown?: boolean }) => { + try { + const client = await requireAuth(); + + const result = await client.getThread(threadId); + + if (!result.ok) { + console.error(red(`Error: ${result.error}`)); + process.exit(1); + } + + const { thread, messages } = result; + + if (options.json) { + console.log(JSON.stringify({ thread, messages }, null, 2)); + return; + } + + // Markdown output (default) + console.log(`# ${thread.title ?? 'Untitled Thread'}\n`); + console.log(`Thread ID: ${thread._id}`); + console.log(`Created: ${new Date(thread.createdAt).toISOString()}\n`); + console.log('---\n'); + + for (const msg of messages) { + const roleLabel = + msg.role === 'user' + ? '**User**' + : msg.role === 'assistant' + ? 
'**Assistant**' + : '**System**'; + console.log(`${roleLabel}:\n`); + console.log(msg.content); + console.log('\n---\n'); + } + } catch (error) { + console.error(formatError(error)); + process.exit(1); + } + }); + +/** + * btca remote init - Initialize a remote config file + */ +const initCommand = new Command('init') + .description('Initialize a remote config file in the current directory') + .option('-p, --project ', 'Project name') + .action(async (options: { project?: string }) => { + try { + const existingConfig = await loadConfig(); + if (existingConfig) { + console.error(red(`Remote config already exists (${REMOTE_CONFIG_FILENAME}).`)); + process.exit(1); + } + + let projectName = options.project; + + if (!projectName) { + const rl = createRl(); + projectName = await promptInput(rl, 'Project name'); + rl.close(); + } + + if (!projectName) { + console.error(red('Project name is required.')); + process.exit(1); + } + + const config: RemoteConfig = { + project: projectName, + model: 'claude-haiku', + resources: [] + }; + + await saveConfig(config); + + console.log(green(`Created ${REMOTE_CONFIG_FILENAME}`)); + console.log(`\nNext steps:`); + console.log(` 1. ${dim('btca remote link')} - Authenticate (if not already)`); + console.log(` 2. ${dim('btca remote add ')} - Add resources`); + console.log(` 3. 
${dim('btca remote sync')} - Sync to cloud`); + } catch (error) { + console.error(formatError(error)); + process.exit(1); + } + }); + +// ───────────────────────────────────────────────────────────────────────────── +// Main Remote Command +// ───────────────────────────────────────────────────────────────────────────── + +export const remoteCommand = new Command('remote') + .description('Manage btca cloud service (remote mode)') + .addCommand(linkCommand) + .addCommand(unlinkCommand) + .addCommand(statusCommand) + .addCommand(wakeCommand) + .addCommand(addCommand) + .addCommand(syncCommand) + .addCommand(askCommand) + .addCommand(grabCommand) + .addCommand(initCommand); diff --git a/apps/cli/src/commands/remove.ts b/apps/cli/src/commands/remove.ts new file mode 100644 index 00000000..5e020fa5 --- /dev/null +++ b/apps/cli/src/commands/remove.ts @@ -0,0 +1,130 @@ +import { Command } from 'commander'; +import * as readline from 'readline'; +import { ensureServer } from '../server/manager.ts'; +import { createClient, getResources, removeResource, BtcaError } from '../client/index.ts'; +import { dim } from '../lib/utils/colors.ts'; + +/** + * Resource definition types matching server schema. + */ +interface GitResource { + type: 'git'; + name: string; + url: string; + branch: string; + searchPath?: string; + searchPaths?: string[]; + specialNotes?: string; +} + +interface LocalResource { + type: 'local'; + name: string; + path: string; + specialNotes?: string; +} + +type ResourceDefinition = GitResource | LocalResource; + +const isGitResource = (r: ResourceDefinition): r is GitResource => r.type === 'git'; + +/** + * Interactive single-select prompt for resources. + * Displays resource name with dimmed path/URL. 
+ */ +async function selectSingleResource(resources: ResourceDefinition[]): Promise { + return new Promise((resolve, reject) => { + const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout + }); + + console.log('\nSelect a resource to remove:\n'); + resources.forEach((r, idx) => { + const location = isGitResource(r) ? r.url : r.path; + console.log(` ${idx + 1}. ${r.name} ${dim(`(${location})`)}`); + }); + console.log(''); + + rl.question('Enter number: ', (answer) => { + rl.close(); + const num = parseInt(answer.trim(), 10); + if (isNaN(num) || num < 1 || num > resources.length) { + reject(new Error('Invalid selection')); + return; + } + resolve(resources[num - 1]!.name); + }); + }); +} + +/** + * Format an error for display, including hint if available. + */ +function formatError(error: unknown): string { + if (error instanceof BtcaError) { + let output = `Error: ${error.message}`; + if (error.hint) { + output += `\n\nHint: ${error.hint}`; + } + return output; + } + return `Error: ${error instanceof Error ? 
error.message : String(error)}`; +} + +export const removeCommand = new Command('remove') + .description('Remove a resource from the configuration') + .argument('[name]', 'Resource name to remove') + .option( + '-g, --global', + 'Remove from global config (not implemented yet - removes from active config)' + ) + .action(async (name: string | undefined, options: { global?: boolean }, command) => { + const globalOpts = command.parent?.opts() as { server?: string; port?: number } | undefined; + + try { + const server = await ensureServer({ + serverUrl: globalOpts?.server, + port: globalOpts?.port, + quiet: true + }); + + const client = createClient(server.url); + const { resources } = await getResources(client); + + if (resources.length === 0) { + console.log('No resources configured.'); + server.stop(); + return; + } + + const names = resources.map((r) => r.name); + + // Use provided name or show interactive picker + let resourceName: string; + if (name) { + resourceName = name; + } else { + resourceName = await selectSingleResource(resources as ResourceDefinition[]); + } + + if (!names.includes(resourceName)) { + console.error(`Error: Resource "${resourceName}" not found.`); + console.error(`\nAvailable resources: ${names.join(', ')}`); + server.stop(); + process.exit(1); + } + + await removeResource(server.url, resourceName); + console.log(`Removed resource: ${resourceName}`); + + server.stop(); + } catch (error) { + if (error instanceof Error && error.message === 'Invalid selection') { + console.error('\nError: Invalid selection. 
Please try again.'); + process.exit(1); + } + console.error(formatError(error)); + process.exit(1); + } + }); diff --git a/apps/cli/src/index.ts b/apps/cli/src/index.ts index f4f001a8..329fd722 100644 --- a/apps/cli/src/index.ts +++ b/apps/cli/src/index.ts @@ -1,10 +1,11 @@ import { Command } from 'commander'; import { addCommand } from './commands/add.ts'; import { askCommand } from './commands/ask.ts'; -import { chatCommand } from './commands/chat.ts'; -import { configCommand } from './commands/config.ts'; import { clearCommand } from './commands/clear.ts'; +import { connectCommand } from './commands/connect.ts'; import { initCommand } from './commands/init.ts'; +import { removeCommand } from './commands/remove.ts'; +import { remoteCommand } from './commands/remote.ts'; import { serveCommand } from './commands/serve.ts'; import { launchTui } from './commands/tui.ts'; import { launchRepl } from './commands/repl.ts'; @@ -28,14 +29,24 @@ const program = new Command() 'Use simple REPL mode instead of TUI (useful for Windows or minimal terminals)' ); +// Resource management commands program.addCommand(addCommand); +program.addCommand(removeCommand); + +// Query commands program.addCommand(askCommand); -program.addCommand(chatCommand); -program.addCommand(configCommand); -program.addCommand(clearCommand); + +// Configuration commands +program.addCommand(connectCommand); program.addCommand(initCommand); + +// Utility commands +program.addCommand(clearCommand); program.addCommand(serveCommand); +// Remote mode commands +program.addCommand(remoteCommand); + // Default action (no subcommand) → launch TUI or REPL program.action(async (options: { server?: string; port?: number; tui?: boolean }) => { try { diff --git a/apps/cli/src/lib/utils/colors.ts b/apps/cli/src/lib/utils/colors.ts index 44666b2e..7bab4fff 100644 --- a/apps/cli/src/lib/utils/colors.ts +++ b/apps/cli/src/lib/utils/colors.ts @@ -6,3 +6,23 @@ * Wrap text in ANSI dim escape codes for muted/secondary text. 
*/ export const dim = (text: string): string => `\x1b[2m${text}\x1b[22m`; + +/** + * Wrap text in ANSI green escape codes for success/connected status. + */ +export const green = (text: string): string => `\x1b[32m${text}\x1b[39m`; + +/** + * Wrap text in ANSI yellow escape codes for warnings. + */ +export const yellow = (text: string): string => `\x1b[33m${text}\x1b[39m`; + +/** + * Wrap text in ANSI red escape codes for errors. + */ +export const red = (text: string): string => `\x1b[31m${text}\x1b[39m`; + +/** + * Wrap text in ANSI bold escape codes. + */ +export const bold = (text: string): string => `\x1b[1m${text}\x1b[22m`; diff --git a/apps/sandbox/README.md b/apps/sandbox/README.md index 468d73ad..4ea18ab9 100644 --- a/apps/sandbox/README.md +++ b/apps/sandbox/README.md @@ -139,3 +139,80 @@ The workflow is: 1. `snapshot.ts` creates a Daytona snapshot with all dependencies baked in 2. `index.ts` creates sandboxes from that snapshot for fast startup 3. Each sandbox runs btca serve and exposes it via Daytona's preview URLs + +## Sandbox File System Structure + +Each sandbox has a specific directory structure for storing configurations and cached resources. + +### Current Structure (Default Project Only) + +For backward compatibility, the "default" project uses the root-level config: + +``` +/root/ +├── btca.config.jsonc # Default project configuration +└── .local/share/btca/ + └── resources/ # Default project's cached git repos + ├── svelte/ # Cloned svelte repo + ├── daytona/ # Cloned daytona repo + └── ... 
+``` + +### Project-Aware Structure (Future) + +When projects are fully implemented, each project will have its own isolated directory: + +``` +/root/ +├── btca.config.jsonc # Default project config (backward compat) +├── .local/share/btca/ +│ ├── resources/ # Default project resources +│ │ ├── svelte/ +│ │ └── daytona/ +│ └── projects/ # Project-specific directories +│ ├── my-project/ +│ │ ├── btca.config.jsonc # Project-specific config +│ │ └── resources/ # Project-specific cached repos +│ │ └── svelte/ # Can be different repo than default's svelte +│ └── another-project/ +│ ├── btca.config.jsonc +│ └── resources/ +│ └── react/ +``` + +### Key Design Decisions + +1. **Project Isolation**: Each project has its own `resources/` directory. This allows + different projects to have resources with the same name (e.g., "svelte") pointing to + different repos without conflict. + +2. **Backward Compatibility**: The "default" project uses the root-level config at + `/root/btca.config.jsonc` and resources at `/root/.local/share/btca/resources/`. + This ensures existing MCP integrations continue working without changes. + +3. **Convex vs Sandbox Separation**: + - **Projects and threads** are managed in Convex (database) + - **The sandbox** is ephemeral and stateless from a conversation perspective + - When answering a question, Convex passes the project name to the sandbox + - The sandbox uses the project name to determine which config/resources directory to use + +4. **Config Upload Flow**: When waking a sandbox or syncing resources, Convex: + - Generates the `btca.config.jsonc` for each project + - Uploads it to the appropriate project directory + - The btca server reads the config from the correct location based on the `project` parameter + +### Request Flow + +1. MCP client calls `ask` with optional `project` parameter +2. Convex validates the request and wakes the sandbox if needed +3. 
Convex forwards the request to the sandbox's `/question` endpoint with the project name +4. The sandbox's btca server: + - Uses `/root/btca.config.jsonc` if project is "default" or not specified + - Uses `/root/.local/share/btca/projects/{project}/btca.config.jsonc` otherwise +5. Resources are cloned/cached to the appropriate project's `resources/` directory + +### Implementation Status + +- [x] **Phase 3**: Schema supports projects, MCP accepts `project` parameter +- [ ] **Phase 5**: Sandbox btca server reads project-specific configs +- [ ] **Phase 5**: Resource syncing creates project-specific directories diff --git a/apps/sandbox/package.json b/apps/sandbox/package.json index 9f4bb8da..e384e20a 100644 --- a/apps/sandbox/package.json +++ b/apps/sandbox/package.json @@ -21,6 +21,10 @@ }, "module": "src/index.ts", "type": "module", + "exports": { + ".": "./src/index.ts", + "./shared": "./src/shared.ts" + }, "scripts": { "check": "tsgo --noEmit", "dev": "bun --watch src/index.ts", diff --git a/apps/sandbox/src/shared.ts b/apps/sandbox/src/shared.ts index 83abbd6d..3eee43a5 100644 --- a/apps/sandbox/src/shared.ts +++ b/apps/sandbox/src/shared.ts @@ -1,2 +1,2 @@ // Snapshot name for btca sandbox -export const BTCA_SNAPSHOT_NAME = 'btca-app-sandbox'; +export const BTCA_SNAPSHOT_NAME = 'btca-app-sandbox-2'; diff --git a/apps/server/package.json b/apps/server/package.json index 8d4186a6..29dd1158 100644 --- a/apps/server/package.json +++ b/apps/server/package.json @@ -30,7 +30,9 @@ "exports": { ".": "./src/index.ts", "./stream": "./src/stream/index.ts", - "./stream/types": "./src/stream/types.ts" + "./stream/types": "./src/stream/types.ts", + "./config/remote": "./src/config/remote.ts", + "./resources/schema": "./src/resources/schema.ts" }, "files": [ "src", @@ -49,9 +51,26 @@ "prettier": "^3.7.4" }, "dependencies": { + "@ai-sdk/amazon-bedrock": "^4.0.30", + "@ai-sdk/anthropic": "^3.0.23", + "@ai-sdk/azure": "^3.0.18", + "@ai-sdk/cerebras": "^2.0.20", + "@ai-sdk/cohere": 
"^3.0.11", + "@ai-sdk/deepinfra": "^2.0.19", + "@ai-sdk/google": "^3.0.13", + "@ai-sdk/google-vertex": "^4.0.28", + "@ai-sdk/groq": "^3.0.15", + "@ai-sdk/mistral": "^3.0.12", + "@ai-sdk/openai": "^3.0.18", + "@ai-sdk/openai-compatible": "^2.0.18", + "@ai-sdk/perplexity": "^3.0.11", + "@ai-sdk/togetherai": "^2.0.20", + "@ai-sdk/xai": "^3.0.34", "@btca/shared": "workspace:*", "@opencode-ai/sdk": "^1.1.28", + "ai": "^6.0.49", "hono": "^4.7.11", + "opencode-ai": "^1.1.36", "zod": "^3.25.76" } } diff --git a/apps/server/src/agent/agent.test.ts b/apps/server/src/agent/agent.test.ts index feb99567..0401aaf4 100644 --- a/apps/server/src/agent/agent.test.ts +++ b/apps/server/src/agent/agent.test.ts @@ -103,8 +103,8 @@ describe('Agent', () => { } expect(events.length).toBeGreaterThan(0); - // Should have received some message.part.updated events - const textEvents = events.filter((e) => e.type === 'message.part.updated'); + // Should have received some text-delta events + const textEvents = events.filter((e) => e.type === 'text-delta'); expect(textEvents.length).toBeGreaterThan(0); }, 60000); }); diff --git a/apps/server/src/agent/index.ts b/apps/server/src/agent/index.ts index 0bf6f02c..7d839467 100644 --- a/apps/server/src/agent/index.ts +++ b/apps/server/src/agent/index.ts @@ -1,2 +1,3 @@ export { Agent } from './service.ts'; +export { AgentLoop } from './loop.ts'; export type { AgentResult, OcEvent, SessionState } from './types.ts'; diff --git a/apps/server/src/agent/loop.ts b/apps/server/src/agent/loop.ts new file mode 100644 index 00000000..be70aa08 --- /dev/null +++ b/apps/server/src/agent/loop.ts @@ -0,0 +1,295 @@ +/** + * Custom Agent Loop + * Uses AI SDK's streamText with custom tools + */ +import { streamText, tool, stepCountIs, type ModelMessage } from 'ai'; + +import { Model } from '../providers/index.ts'; +import { ReadTool, GrepTool, GlobTool, ListTool } from '../tools/index.ts'; + +export namespace AgentLoop { + // Event types for streaming + export type 
AgentEvent = + | { type: 'text-delta'; text: string } + | { type: 'tool-call'; toolName: string; input: unknown } + | { type: 'tool-result'; toolName: string; output: string } + | { + type: 'finish'; + finishReason: string; + usage?: { inputTokens?: number; outputTokens?: number }; + } + | { type: 'error'; error: Error }; + + // Options for the agent loop + export type Options = { + providerId: string; + modelId: string; + collectionPath: string; + agentInstructions: string; + question: string; + maxSteps?: number; + }; + + // Result type + export type Result = { + answer: string; + model: { provider: string; model: string }; + events: AgentEvent[]; + }; + + /** + * Build the system prompt for the agent + */ + function buildSystemPrompt(agentInstructions: string): string { + return [ + 'You are btca, an expert documentation search agent.', + 'Your job is to answer questions by searching through the collection of resources.', + '', + 'You have access to the following tools:', + '- read: Read file contents with line numbers', + '- grep: Search file contents using regex patterns', + '- glob: Find files matching glob patterns', + '- list: List directory contents', + '', + 'Guidelines:', + '- Use glob to find relevant files first, then read them', + '- Use grep to search for specific code patterns or text', + '- Always cite the source files in your answers', + '- Be concise but thorough in your responses', + '- If you cannot find the answer, say so clearly', + '', + agentInstructions + ].join('\n'); + } + + /** + * Create the tools for the agent + */ + function createTools(basePath: string) { + return { + read: tool({ + description: 'Read the contents of a file. 
Returns the file contents with line numbers.', + inputSchema: ReadTool.Parameters, + execute: async (params: ReadTool.ParametersType) => { + const result = await ReadTool.execute(params, { basePath }); + return result.output; + } + }), + + grep: tool({ + description: + 'Search for a regex pattern in file contents. Returns matching lines with file paths and line numbers.', + inputSchema: GrepTool.Parameters, + execute: async (params: GrepTool.ParametersType) => { + const result = await GrepTool.execute(params, { basePath }); + return result.output; + } + }), + + glob: tool({ + description: + 'Find files matching a glob pattern (e.g. "**/*.ts", "src/**/*.js"). Returns a list of matching file paths sorted by modification time.', + inputSchema: GlobTool.Parameters, + execute: async (params: GlobTool.ParametersType) => { + const result = await GlobTool.execute(params, { basePath }); + return result.output; + } + }), + + list: tool({ + description: + 'List the contents of a directory. Returns files and subdirectories with their types.', + inputSchema: ListTool.Parameters, + execute: async (params: ListTool.ParametersType) => { + const result = await ListTool.execute(params, { basePath }); + return result.output; + } + }) + }; + } + + /** + * Get initial context by listing the collection directory + */ + async function getInitialContext(collectionPath: string): Promise { + const result = await ListTool.execute({ path: '.' 
}, { basePath: collectionPath }); + return `Collection contents:\n${result.output}`; + } + + /** + * Run the agent loop and return the final answer + */ + export async function run(options: Options): Promise { + const { + providerId, + modelId, + collectionPath, + agentInstructions, + question, + maxSteps = 40 + } = options; + + // Get the model + const model = await Model.getModel(providerId, modelId); + + // Get initial context + const initialContext = await getInitialContext(collectionPath); + + // Build messages + const messages: ModelMessage[] = [ + { + role: 'user', + content: `${initialContext}\n\nQuestion: ${question}` + } + ]; + + // Create tools + const tools = createTools(collectionPath); + + // Collect events + const events: AgentEvent[] = []; + let fullText = ''; + + // Run streamText with tool execution + const result = streamText({ + model, + system: buildSystemPrompt(agentInstructions), + messages, + tools, + stopWhen: stepCountIs(maxSteps) + }); + + // Process the stream + for await (const part of result.fullStream) { + switch (part.type) { + case 'text-delta': + fullText += part.text; + events.push({ type: 'text-delta', text: part.text }); + break; + + case 'tool-call': + events.push({ + type: 'tool-call', + toolName: part.toolName, + input: part.input + }); + break; + + case 'tool-result': + events.push({ + type: 'tool-result', + toolName: part.toolName, + output: typeof part.output === 'string' ? part.output : JSON.stringify(part.output) + }); + break; + + case 'finish': + events.push({ + type: 'finish', + finishReason: part.finishReason ?? 'unknown', + usage: { + inputTokens: part.totalUsage?.inputTokens, + outputTokens: part.totalUsage?.outputTokens + } + }); + break; + + case 'error': + events.push({ + type: 'error', + error: part.error instanceof Error ? 
part.error : new Error(String(part.error)) + }); + break; + } + } + + return { + answer: fullText.trim(), + model: { provider: providerId, model: modelId }, + events + }; + } + + /** + * Run the agent loop and stream events + */ + export async function* stream(options: Options): AsyncGenerator { + const { + providerId, + modelId, + collectionPath, + agentInstructions, + question, + maxSteps = 40 + } = options; + + // Get the model + const model = await Model.getModel(providerId, modelId); + + // Get initial context + const initialContext = await getInitialContext(collectionPath); + + // Build messages + const messages: ModelMessage[] = [ + { + role: 'user', + content: `${initialContext}\n\nQuestion: ${question}` + } + ]; + + // Create tools + const tools = createTools(collectionPath); + + // Run streamText with tool execution + const result = streamText({ + model, + system: buildSystemPrompt(agentInstructions), + messages, + tools, + stopWhen: stepCountIs(maxSteps) + }); + + // Stream events + for await (const part of result.fullStream) { + switch (part.type) { + case 'text-delta': + yield { type: 'text-delta', text: part.text }; + break; + + case 'tool-call': + yield { + type: 'tool-call', + toolName: part.toolName, + input: part.input + }; + break; + + case 'tool-result': + yield { + type: 'tool-result', + toolName: part.toolName, + output: typeof part.output === 'string' ? part.output : JSON.stringify(part.output) + }; + break; + + case 'finish': + yield { + type: 'finish', + finishReason: part.finishReason ?? 'unknown', + usage: { + inputTokens: part.totalUsage?.inputTokens, + outputTokens: part.totalUsage?.outputTokens + } + }; + break; + + case 'error': + yield { + type: 'error', + error: part.error instanceof Error ? 
part.error : new Error(String(part.error)) + }; + break; + } + } + } +} diff --git a/apps/server/src/agent/service.ts b/apps/server/src/agent/service.ts index 566f90be..8ae2892a 100644 --- a/apps/server/src/agent/service.ts +++ b/apps/server/src/agent/service.ts @@ -1,20 +1,25 @@ +/** + * Agent Service + * Refactored to use custom AI SDK loop instead of spawning OpenCode instances + */ import { createOpencode, createOpencodeClient, type Config as OpenCodeConfig, - type OpencodeClient, - type Event as OcEvent + type OpencodeClient } from '@opencode-ai/sdk'; import { Config } from '../config/index.ts'; import { CommonHints, type TaggedErrorOptions } from '../errors.ts'; import { Metrics } from '../metrics/index.ts'; +import { Auth, getSupportedProviders } from '../providers/index.ts'; import type { CollectionResult } from '../collections/types.ts'; import type { AgentResult, TrackedInstance, InstanceInfo } from './types.ts'; +import { AgentLoop } from './loop.ts'; export namespace Agent { // ───────────────────────────────────────────────────────────────────────────── - // Instance Registry - tracks OpenCode instances for cleanup + // Instance Registry - tracks OpenCode instances for cleanup (backward compat) // ───────────────────────────────────────────────────────────────────────────── const instanceRegistry = new Map(); @@ -45,12 +50,10 @@ export namespace Agent { return deleted; }; - const updateInstanceActivity = (id: string): void => { - const instance = instanceRegistry.get(id); - if (instance) { - instance.lastActivity = new Date(); - } - }; + // ───────────────────────────────────────────────────────────────────────────── + // Error Classes + // ───────────────────────────────────────────────────────────────────────────── + export class AgentError extends Error { readonly _tag = 'AgentError'; override readonly cause?: unknown; @@ -115,11 +118,15 @@ export namespace Agent { } } + // 
───────────────────────────────────────────────────────────────────────────── + // Service Type + // ───────────────────────────────────────────────────────────────────────────── + export type Service = { - askStream: (args: { - collection: CollectionResult; - question: string; - }) => Promise<{ stream: AsyncIterable; model: { provider: string; model: string } }>; + askStream: (args: { collection: CollectionResult; question: string }) => Promise<{ + stream: AsyncIterable; + model: { provider: string; model: string }; + }>; ask: (args: { collection: CollectionResult; question: string }) => Promise; @@ -140,6 +147,10 @@ export namespace Agent { closeAllInstances: () => Promise<{ closed: number }>; }; + // ───────────────────────────────────────────────────────────────────────────── + // OpenCode Instance Creation (for backward compatibility with getOpencodeInstance) + // ───────────────────────────────────────────────────────────────────────────── + const buildOpenCodeConfig = (args: { agentInstructions: string; providerId?: string; @@ -207,44 +218,6 @@ export namespace Agent { }; }; - // Gateway providers route to other providers' models, so model validation - // should be skipped for these. The gateway itself handles model resolution. 
- const GATEWAY_PROVIDERS = ['opencode'] as const; - - const isGatewayProvider = (providerId: string): boolean => - GATEWAY_PROVIDERS.includes(providerId as (typeof GATEWAY_PROVIDERS)[number]); - - const validateProviderAndModel = async ( - client: OpencodeClient, - providerId: string, - modelId: string - ) => { - const response = await client.provider.list().catch(() => null); - if (!response?.data) return; - - type ProviderInfo = { id: string; models: Record }; - const data = response.data as { all: ProviderInfo[]; connected: string[] }; - - const { all, connected } = data; - const provider = all.find((p) => p.id === providerId); - if (!provider) - throw new InvalidProviderError({ providerId, availableProviders: all.map((p) => p.id) }); - if (!connected.includes(providerId)) { - throw new ProviderNotConnectedError({ providerId, connectedProviders: connected }); - } - - // Skip model validation for gateway providers - they route to other providers' models - if (isGatewayProvider(providerId)) { - Metrics.info('agent.validation.gateway_skip', { providerId, modelId }); - return; - } - - const modelIds = Object.keys(provider.models); - if (!modelIds.includes(modelId)) { - throw new InvalidModelError({ providerId, modelId, availableModels: modelIds }); - } - }; - const createOpencodeInstance = async (args: { collectionPath: string; ocConfig: OpenCodeConfig; @@ -256,14 +229,17 @@ export namespace Agent { const maxAttempts = 10; for (let attempt = 0; attempt < maxAttempts; attempt++) { const port = Math.floor(Math.random() * 3000) + 3000; - const created = await createOpencode({ port, config: args.ocConfig }).catch((err: any) => { - if (err?.cause instanceof Error && err.cause.stack?.includes('port')) return null; - throw new AgentError({ - message: 'Failed to create OpenCode instance', - hint: 'This may be a temporary issue. 
Try running the command again.', - cause: err - }); - }); + const created = await createOpencode({ port, config: args.ocConfig }).catch( + (err: unknown) => { + const error = err as { cause?: Error }; + if (error?.cause instanceof Error && error.cause.stack?.includes('port')) return null; + throw new AgentError({ + message: 'Failed to create OpenCode instance', + hint: 'This may be a temporary issue. Try running the command again.', + cause: err + }); + } + ); if (created) { const baseUrl = `http://localhost:${port}`; @@ -281,164 +257,102 @@ export namespace Agent { }); }; - const sessionEvents = async (args: { - sessionID: string; - client: OpencodeClient; - }): Promise> => { - const events = await args.client.event.subscribe().catch((cause: unknown) => { - throw new AgentError({ - message: 'Failed to subscribe to events', - hint: 'This may be a temporary connection issue. Try running the command again.', - cause + // ───────────────────────────────────────────────────────────────────────────── + // Service Factory + // ───────────────────────────────────────────────────────────────────────────── + + export const create = (config: Config.Service): Service => { + /** + * Ask a question and stream the response using the new AI SDK loop + */ + const askStream: Service['askStream'] = async ({ collection, question }) => { + Metrics.info('agent.ask.start', { + provider: config.provider, + model: config.model, + questionLength: question.length }); - }); - async function* gen() { - for await (const event of events.stream) { - const props = event.properties as any; - if (props && 'sessionID' in props && props.sessionID !== args.sessionID) continue; - yield event; - if ( - event.type === 'session.idle' && - (event.properties as any)?.sessionID === args.sessionID - ) - return; + // Validate provider is authenticated + const isAuthed = await Auth.isAuthenticated(config.provider); + if (!isAuthed && config.provider !== 'opencode') { + const authenticated = await 
Auth.getAuthenticatedProviders(); + throw new ProviderNotConnectedError({ + providerId: config.provider, + connectedProviders: authenticated + }); } - } - - return gen(); - }; - - const extractAnswerFromEvents = (events: readonly OcEvent[]): string => { - const partIds: string[] = []; - const partText = new Map(); - - for (const event of events) { - if (event.type !== 'message.part.updated') continue; - const part: any = (event.properties as any).part; - if (!part || part.type !== 'text') continue; - if (!partIds.includes(part.id)) partIds.push(part.id); - partText.set(part.id, String(part.text ?? '')); - } - return partIds - .map((id) => partText.get(id) ?? '') - .join('') - .trim(); - }; - - export const create = (config: Config.Service): Service => { - const askStream: Service['askStream'] = async ({ collection, question }) => { - const ocConfig = buildOpenCodeConfig({ - agentInstructions: collection.agentInstructions, + // Create a generator that wraps the AgentLoop stream + const eventGenerator = AgentLoop.stream({ providerId: config.provider, - providerTimeoutMs: config.providerTimeoutMs - }); - const { client, server, baseUrl } = await createOpencodeInstance({ + modelId: config.model, collectionPath: collection.path, - ocConfig + agentInstructions: collection.agentInstructions, + question }); - Metrics.info('agent.oc.ready', { baseUrl, collectionPath: collection.path }); + return { + stream: eventGenerator, + model: { provider: config.provider, model: config.model } + }; + }; - try { - try { - await validateProviderAndModel(client, config.provider, config.model); - Metrics.info('agent.validate.ok', { provider: config.provider, model: config.model }); - } catch (cause) { - // Re-throw if it's already one of our specific error types with hints - if ( - cause instanceof InvalidProviderError || - cause instanceof InvalidModelError || - cause instanceof ProviderNotConnectedError - ) { - throw cause; - } - throw new AgentError({ - message: 'Provider/model 
validation failed', - hint: `Check that provider "${config.provider}" and model "${config.model}" are valid. ${CommonHints.RUN_AUTH}`, - cause - }); - } + /** + * Ask a question and return the complete response + */ + const ask: Service['ask'] = async ({ collection, question }) => { + Metrics.info('agent.ask.start', { + provider: config.provider, + model: config.model, + questionLength: question.length + }); - const session = await client.session.create().catch((cause: unknown) => { - throw new AgentError({ - message: 'Failed to create session', - hint: 'This may be a temporary issue with the OpenCode instance. Try running the command again.', - cause - }); + // Validate provider is authenticated + const isAuthed = await Auth.isAuthenticated(config.provider); + if (!isAuthed && config.provider !== 'opencode') { + const authenticated = await Auth.getAuthenticatedProviders(); + throw new ProviderNotConnectedError({ + providerId: config.provider, + connectedProviders: authenticated }); + } - if (session.error) - throw new AgentError({ - message: 'Failed to create session', - hint: 'The OpenCode server returned an error. Try running the command again.', - cause: session.error - }); - - const sessionID = session.data?.id; - if (!sessionID) { - throw new AgentError({ - message: 'Failed to create session - no session ID returned', - hint: 'This is unexpected. 
Try running the command again or check for btca updates.', - cause: new Error('Missing session id') - }); - } - Metrics.info('agent.session.created', { sessionID }); - - const eventStream = await sessionEvents({ sessionID, client }); - - Metrics.info('agent.prompt.sent', { sessionID, questionLength: question.length }); - void client.session - .prompt({ - path: { id: sessionID }, - body: { - agent: 'btcaDocsAgent', - model: { providerID: config.provider, modelID: config.model }, - parts: [{ type: 'text', text: question }] - } - }) - .catch((cause: unknown) => { - Metrics.error('agent.prompt.err', { error: Metrics.errorInfo(cause) }); - }); + try { + const result = await AgentLoop.run({ + providerId: config.provider, + modelId: config.model, + collectionPath: collection.path, + agentInstructions: collection.agentInstructions, + question + }); - async function* filtered() { - try { - for await (const event of eventStream) { - if (event.type === 'session.error') { - const props: any = event.properties; - throw new AgentError({ - message: props?.error?.name ?? 'Unknown session error', - hint: 'An error occurred during the AI session. Try running the command again or simplify your question.', - cause: props?.error - }); - } - yield event; - } - } finally { - Metrics.info('agent.session.closed', { sessionID }); - server.close(); - } - } + Metrics.info('agent.ask.complete', { + provider: config.provider, + model: config.model, + answerLength: result.answer.length, + eventCount: result.events.length + }); return { - stream: filtered(), - model: { provider: config.provider, model: config.model } + answer: result.answer, + model: result.model, + events: result.events }; - } catch (cause) { - server.close(); - throw cause; + } catch (error) { + Metrics.error('agent.ask.error', { error: Metrics.errorInfo(error) }); + throw new AgentError({ + message: 'Failed to get response from AI', + hint: 'This may be a temporary issue. 
Try running the command again.', + cause: error + }); } }; - const ask: Service['ask'] = async ({ collection, question }) => { - const { stream, model } = await askStream({ collection, question }); - const events: OcEvent[] = []; - for await (const event of stream) events.push(event); - return { answer: extractAnswerFromEvents(events), model, events }; - }; - - const getOpencodeInstanceMethod: Service['getOpencodeInstance'] = async ({ collection }) => { + /** + * Get an OpenCode instance URL (backward compatibility) + * This still spawns a full OpenCode instance for clients that need it + */ + const getOpencodeInstance: Service['getOpencodeInstance'] = async ({ collection }) => { const ocConfig = buildOpenCodeConfig({ agentInstructions: collection.agentInstructions, providerId: config.provider, @@ -466,41 +380,32 @@ export namespace Agent { }; }; + /** + * List available providers using local auth data + */ const listProviders: Service['listProviders'] = async () => { - const ocConfig = buildOpenCodeConfig({ - agentInstructions: '', - providerId: config.provider, - providerTimeoutMs: config.providerTimeoutMs - }); - const { client, server } = await createOpencodeInstance({ - collectionPath: process.cwd(), - ocConfig - }); + // Get all supported providers from registry + const supportedProviders = getSupportedProviders(); - try { - const response = await client.provider.list().catch((cause: unknown) => { - throw new AgentError({ - message: 'Failed to fetch provider list', - hint: CommonHints.RUN_AUTH, - cause - }); - }); - if (!response?.data) { - throw new AgentError({ - message: 'Failed to fetch provider list', - hint: CommonHints.RUN_AUTH - }); - } - const data = response.data as { - all: { id: string; models: Record }[]; - connected: string[]; - }; - return { all: data.all, connected: data.connected }; - } finally { - server.close(); - } + // Get authenticated providers from OpenCode's auth storage + const authenticatedProviders = await 
Auth.getAuthenticatedProviders(); + + // Build the response - we don't have model lists without spawning OpenCode, + // so we return empty models for now + const all = supportedProviders.map((id) => ({ + id, + models: {} as Record + })); + + return { + all, + connected: authenticatedProviders + }; }; + /** + * Close a specific OpenCode instance + */ const closeInstance: Service['closeInstance'] = async (instanceId) => { const instance = instanceRegistry.get(instanceId); if (!instance) { @@ -524,6 +429,9 @@ export namespace Agent { } }; + /** + * List all active OpenCode instances + */ const listInstances: Service['listInstances'] = () => { return Array.from(instanceRegistry.values()).map((instance) => ({ id: instance.id, @@ -534,6 +442,9 @@ export namespace Agent { })); }; + /** + * Close all OpenCode instances + */ const closeAllInstances: Service['closeAllInstances'] = async () => { const instances = Array.from(instanceRegistry.values()); let closed = 0; @@ -560,7 +471,7 @@ export namespace Agent { return { askStream, ask, - getOpencodeInstance: getOpencodeInstanceMethod, + getOpencodeInstance, listProviders, closeInstance, listInstances, diff --git a/apps/server/src/agent/types.ts b/apps/server/src/agent/types.ts index 0ed0f790..d7779ff0 100644 --- a/apps/server/src/agent/types.ts +++ b/apps/server/src/agent/types.ts @@ -1,9 +1,10 @@ import type { Event as OcEvent, OpencodeClient } from '@opencode-ai/sdk'; +import type { AgentLoop } from './loop.ts'; export type AgentResult = { answer: string; model: { provider: string; model: string }; - events: OcEvent[]; + events: AgentLoop.AgentEvent[]; }; export type SessionState = { diff --git a/apps/server/src/config/index.ts b/apps/server/src/config/index.ts index f595db86..7239f0ed 100644 --- a/apps/server/src/config/index.ts +++ b/apps/server/src/config/index.ts @@ -52,8 +52,9 @@ const StoredConfigSchema = z.object({ dataDirectory: z.string().optional(), providerTimeoutMs: z.number().int().positive().optional(), 
resources: z.array(ResourceDefinitionSchema), - model: z.string(), - provider: z.string() + // Provider and model are optional - defaults are applied when loading + model: z.string().optional(), + provider: z.string().optional() }); type StoredConfig = z.infer; @@ -532,10 +533,10 @@ export namespace Config { return getMergedResources(); }, get model() { - return getActiveConfig().model; + return getActiveConfig().model ?? DEFAULT_MODEL; }, get provider() { - return getActiveConfig().provider; + return getActiveConfig().provider ?? DEFAULT_PROVIDER; }, get providerTimeoutMs() { return getActiveConfig().providerTimeoutMs; diff --git a/apps/server/src/config/remote.ts b/apps/server/src/config/remote.ts new file mode 100644 index 00000000..07f4b2c8 --- /dev/null +++ b/apps/server/src/config/remote.ts @@ -0,0 +1,454 @@ +import { promises as fs } from 'node:fs'; +import path from 'node:path'; + +import { z } from 'zod'; +import { CommonHints, type TaggedErrorOptions } from '../errors.ts'; +import { Metrics } from '../metrics/index.ts'; +import { GitResourceSchema, type GitResource } from '../resources/schema.ts'; + +// ───────────────────────────────────────────────────────────────────────────── +// Remote Config Constants +// ───────────────────────────────────────────────────────────────────────────── + +export const REMOTE_CONFIG_FILENAME = 'btca.remote.config.jsonc'; +export const REMOTE_AUTH_FILENAME = 'remote-auth.json'; +export const REMOTE_CONFIG_SCHEMA_URL = 'https://btca.dev/btca.remote.schema.json'; +export const GLOBAL_CONFIG_DIR = '~/.config/btca'; + +/** + * Available models for remote mode (preset list). + * These are subscription-based and managed by the cloud service. 
+ */ +export const REMOTE_MODELS = [ + { id: 'claude-sonnet', name: 'Claude Sonnet', description: 'Default, balanced performance' }, + { id: 'claude-haiku', name: 'Claude Haiku', description: 'Faster and cheaper' }, + { id: 'gpt-4o', name: 'GPT-4o', description: 'OpenAI GPT-4o' }, + { id: 'gpt-4o-mini', name: 'GPT-4o Mini', description: 'Faster and cheaper' } +] as const; + +export type RemoteModelId = (typeof REMOTE_MODELS)[number]['id']; + +// ───────────────────────────────────────────────────────────────────────────── +// Remote Config Schemas +// ───────────────────────────────────────────────────────────────────────────── + +/** + * Git resource for remote mode (only git resources are supported in remote mode) + */ +const RemoteGitResourceSchema = GitResourceSchema.omit({ type: true }).extend({ + type: z.literal('git').default('git') +}); + +export type RemoteGitResource = z.infer; + +/** + * Remote config file schema (btca.remote.config.jsonc) + */ +export const RemoteConfigSchema = z.object({ + $schema: z.string().optional(), + project: z.string().min(1, 'Project name is required'), + model: z.enum(['claude-sonnet', 'claude-haiku', 'gpt-4o', 'gpt-4o-mini']).optional(), + resources: z.array(RemoteGitResourceSchema).default([]) +}); + +export type RemoteConfig = z.infer; + +/** + * Remote auth storage schema (~/.config/btca/remote-auth.json) + */ +export const RemoteAuthSchema = z.object({ + apiKey: z.string().min(1), + linkedAt: z.number() +}); + +export type RemoteAuth = z.infer; + +// ───────────────────────────────────────────────────────────────────────────── +// Error Class +// ───────────────────────────────────────────────────────────────────────────── + +export class RemoteConfigError extends Error { + readonly _tag = 'RemoteConfigError'; + override readonly cause?: unknown; + readonly hint?: string; + + constructor(args: TaggedErrorOptions) { + super(args.message); + this.cause = args.cause; + this.hint = args.hint; + } +} + +// 
───────────────────────────────────────────────────────────────────────────── +// Helper Functions +// ───────────────────────────────────────────────────────────────────────────── + +const expandHome = (filePath: string): string => { + const home = process.env.HOME ?? process.env.USERPROFILE ?? ''; + if (filePath.startsWith('~/')) return home + filePath.slice(1); + return filePath; +}; + +const stripJsonc = (content: string): string => { + // Remove // and /* */ comments without touching strings. + let out = ''; + let i = 0; + let inString = false; + let quote: '"' | "'" | null = null; + let escaped = false; + + while (i < content.length) { + const ch = content[i] ?? ''; + const next = content[i + 1] ?? ''; + + if (inString) { + out += ch; + if (escaped) escaped = false; + else if (ch === '\\') escaped = true; + else if (quote && ch === quote) { + inString = false; + quote = null; + } + i += 1; + continue; + } + + if (ch === '/' && next === '/') { + i += 2; + while (i < content.length && content[i] !== '\n') i += 1; + continue; + } + + if (ch === '/' && next === '*') { + i += 2; + while (i < content.length) { + if (content[i] === '*' && content[i + 1] === '/') { + i += 2; + break; + } + i += 1; + } + continue; + } + + if (ch === '"' || ch === "'") { + inString = true; + quote = ch; + out += ch; + i += 1; + continue; + } + + out += ch; + i += 1; + } + + // Remove trailing commas (outside strings). + let normalized = ''; + inString = false; + quote = null; + escaped = false; + i = 0; + + while (i < out.length) { + const ch = out[i] ?? ''; + + if (inString) { + normalized += ch; + if (escaped) escaped = false; + else if (ch === '\\') escaped = true; + else if (quote && ch === quote) { + inString = false; + quote = null; + } + i += 1; + continue; + } + + if (ch === '"' || ch === "'") { + inString = true; + quote = ch; + normalized += ch; + i += 1; + continue; + } + + if (ch === ',') { + let j = i + 1; + while (j < out.length && /\s/.test(out[j] ?? 
'')) j += 1; + const nextNonWs = out[j] ?? ''; + if (nextNonWs === ']' || nextNonWs === '}') { + i += 1; + continue; + } + } + + normalized += ch; + i += 1; + } + + return normalized.trim(); +}; + +const parseJsonc = (content: string): unknown => JSON.parse(stripJsonc(content)); + +// ───────────────────────────────────────────────────────────────────────────── +// Remote Config Namespace +// ───────────────────────────────────────────────────────────────────────────── + +export namespace RemoteConfigService { + /** + * Get the path to the remote auth file + */ + export function getAuthPath(): string { + return `${expandHome(GLOBAL_CONFIG_DIR)}/${REMOTE_AUTH_FILENAME}`; + } + + /** + * Get the path to the remote config file in the current directory + */ + export function getConfigPath(cwd: string = process.cwd()): string { + return `${cwd}/${REMOTE_CONFIG_FILENAME}`; + } + + /** + * Check if the user is authenticated with remote + */ + export async function isAuthenticated(): Promise { + const authPath = getAuthPath(); + try { + const content = await Bun.file(authPath).text(); + const parsed = JSON.parse(content); + const result = RemoteAuthSchema.safeParse(parsed); + return result.success && !!result.data.apiKey; + } catch { + return false; + } + } + + /** + * Load the remote auth credentials + */ + export async function loadAuth(): Promise { + const authPath = getAuthPath(); + try { + const content = await Bun.file(authPath).text(); + const parsed = JSON.parse(content); + const result = RemoteAuthSchema.safeParse(parsed); + if (!result.success) { + Metrics.error('remote.auth.invalid', { path: authPath, error: result.error.message }); + return null; + } + return result.data; + } catch { + return null; + } + } + + /** + * Save remote auth credentials + */ + export async function saveAuth(auth: RemoteAuth): Promise { + const authPath = getAuthPath(); + const configDir = path.dirname(authPath); + + try { + await fs.mkdir(configDir, { recursive: true }); + await 
Bun.write(authPath, JSON.stringify(auth, null, 2)); + // Set file permissions to owner-only (600) + await fs.chmod(authPath, 0o600); + Metrics.info('remote.auth.saved', { path: authPath }); + } catch (cause) { + throw new RemoteConfigError({ + message: `Failed to save remote auth to: "${authPath}"`, + hint: 'Check that you have write permissions to the config directory.', + cause + }); + } + } + + /** + * Delete remote auth credentials (unlink) + */ + export async function deleteAuth(): Promise { + const authPath = getAuthPath(); + try { + await fs.unlink(authPath); + Metrics.info('remote.auth.deleted', { path: authPath }); + } catch { + // Ignore if file doesn't exist + } + } + + /** + * Check if a remote config file exists in the current directory + */ + export async function configExists(cwd: string = process.cwd()): Promise { + const configPath = getConfigPath(cwd); + return Bun.file(configPath).exists(); + } + + /** + * Load the remote config from the current directory + */ + export async function loadConfig(cwd: string = process.cwd()): Promise { + const configPath = getConfigPath(cwd); + + try { + const content = await Bun.file(configPath).text(); + const parsed = parseJsonc(content); + const result = RemoteConfigSchema.safeParse(parsed); + + if (!result.success) { + const issues = result.error.issues + .map((i) => ` - ${i.path.join('.')}: ${i.message}`) + .join('\n'); + throw new RemoteConfigError({ + message: `Invalid remote config structure:\n${issues}`, + hint: `${CommonHints.CHECK_CONFIG} Required field: "project" (string).`, + cause: result.error + }); + } + + Metrics.info('remote.config.loaded', { + path: configPath, + project: result.data.project, + resourceCount: result.data.resources.length + }); + + return result.data; + } catch (error) { + if (error instanceof RemoteConfigError) throw error; + // File doesn't exist or can't be read + return null; + } + } + + /** + * Save the remote config to the current directory + */ + export async function 
saveConfig( + config: RemoteConfig, + cwd: string = process.cwd() + ): Promise { + const configPath = getConfigPath(cwd); + + const toSave = { + $schema: REMOTE_CONFIG_SCHEMA_URL, + ...config + }; + + try { + await Bun.write(configPath, JSON.stringify(toSave, null, '\t')); + Metrics.info('remote.config.saved', { + path: configPath, + project: config.project, + resourceCount: config.resources.length + }); + } catch (cause) { + throw new RemoteConfigError({ + message: `Failed to save remote config to: "${configPath}"`, + hint: 'Check that you have write permissions to the directory.', + cause + }); + } + } + + /** + * Create a new remote config with defaults + */ + export function createDefaultConfig(projectName: string): RemoteConfig { + return { + project: projectName, + model: 'claude-sonnet', + resources: [] + }; + } + + /** + * Add a resource to the remote config + */ + export async function addResource( + resource: GitResource, + cwd: string = process.cwd() + ): Promise { + let config = await loadConfig(cwd); + + if (!config) { + throw new RemoteConfigError({ + message: 'No remote config found in current directory', + hint: `Create a remote config first with "btca remote init" or create a ${REMOTE_CONFIG_FILENAME} file.` + }); + } + + // Check for duplicate + if (config.resources.some((r) => r.name === resource.name)) { + throw new RemoteConfigError({ + message: `Resource "${resource.name}" already exists in remote config`, + hint: `Remove the existing resource first or use a different name.` + }); + } + + config = { + ...config, + resources: [...config.resources, resource] + }; + + await saveConfig(config, cwd); + return config; + } + + /** + * Remove a resource from the remote config + */ + export async function removeResource( + name: string, + cwd: string = process.cwd() + ): Promise { + let config = await loadConfig(cwd); + + if (!config) { + throw new RemoteConfigError({ + message: 'No remote config found in current directory', + hint: `Create a remote 
config first with "btca remote init" or create a ${REMOTE_CONFIG_FILENAME} file.` + }); + } + + const existingIndex = config.resources.findIndex((r) => r.name === name); + if (existingIndex === -1) { + throw new RemoteConfigError({ + message: `Resource "${name}" not found in remote config`, + hint: `Available resources: ${config.resources.map((r) => r.name).join(', ') || 'none'}` + }); + } + + config = { + ...config, + resources: config.resources.filter((r) => r.name !== name) + }; + + await saveConfig(config, cwd); + return config; + } + + /** + * Update the model in the remote config + */ + export async function updateModel( + model: RemoteModelId, + cwd: string = process.cwd() + ): Promise { + let config = await loadConfig(cwd); + + if (!config) { + throw new RemoteConfigError({ + message: 'No remote config found in current directory', + hint: `Create a remote config first with "btca remote init" or create a ${REMOTE_CONFIG_FILENAME} file.` + }); + } + + config = { ...config, model }; + await saveConfig(config, cwd); + return config; + } +} diff --git a/apps/server/src/providers/auth.ts b/apps/server/src/providers/auth.ts new file mode 100644 index 00000000..1d43fbfd --- /dev/null +++ b/apps/server/src/providers/auth.ts @@ -0,0 +1,139 @@ +/** + * Auth wrapper that reads from OpenCode's auth storage + * Provides credential storage and retrieval for AI providers + * + * OpenCode stores credentials at: + * - Linux: ~/.local/share/opencode/auth.json + * - macOS: ~/.local/share/opencode/auth.json (uses XDG on macOS too) + * - Windows: %APPDATA%/opencode/auth.json + */ +import * as path from 'node:path'; +import * as os from 'node:os'; +import { z } from 'zod'; + +export namespace Auth { + // Auth schema matching OpenCode's format + const ApiKeyAuthSchema = z.object({ + type: z.literal('api'), + key: z.string() + }); + + const OAuthAuthSchema = z.object({ + type: z.literal('oauth'), + access: z.string(), + refresh: z.string(), + expires: z.number() + }); + + const 
WellKnownAuthSchema = z.object({ + type: z.literal('wellknown') + }); + + const AuthInfoSchema = z.union([ApiKeyAuthSchema, OAuthAuthSchema, WellKnownAuthSchema]); + const AuthFileSchema = z.record(z.string(), AuthInfoSchema); + + export type ApiKeyAuth = z.infer; + export type OAuthAuth = z.infer; + export type WellKnownAuth = z.infer; + export type AuthInfo = z.infer; + + /** + * Get the path to OpenCode's data directory + */ + function getDataPath(): string { + const platform = os.platform(); + + if (platform === 'win32') { + const appdata = process.env.APPDATA || path.join(os.homedir(), 'AppData', 'Roaming'); + return path.join(appdata, 'opencode'); + } + + // Linux and macOS use XDG_DATA_HOME or ~/.local/share + const xdgData = process.env.XDG_DATA_HOME || path.join(os.homedir(), '.local', 'share'); + return path.join(xdgData, 'opencode'); + } + + /** + * Get the path to the auth.json file + */ + function getAuthFilePath(): string { + return path.join(getDataPath(), 'auth.json'); + } + + /** + * Read and parse the auth file + */ + async function readAuthFile(): Promise> { + const filepath = getAuthFilePath(); + const file = Bun.file(filepath); + + if (!(await file.exists())) { + return {}; + } + + try { + const content = await file.json(); + const parsed = AuthFileSchema.safeParse(content); + if (!parsed.success) { + console.warn('Invalid auth.json format:', parsed.error); + return {}; + } + return parsed.data; + } catch (error) { + console.warn('Failed to read auth.json:', error); + return {}; + } + } + + /** + * Get stored credentials for a provider + * Returns undefined if no credentials are stored + */ + export async function getCredentials(providerId: string): Promise { + const authData = await readAuthFile(); + return authData[providerId]; + } + + /** + * Check if a provider is authenticated + */ + export async function isAuthenticated(providerId: string): Promise { + const auth = await getCredentials(providerId); + return auth !== undefined; + } + + /** 
+ * Get the API key or access token for a provider + * Returns undefined if not authenticated or no key available + */ + export async function getApiKey(providerId: string): Promise { + const auth = await getCredentials(providerId); + if (!auth) return undefined; + + if (auth.type === 'api') { + return auth.key; + } + + if (auth.type === 'oauth') { + return auth.access; + } + + // wellknown auth doesn't have an API key + return undefined; + } + + /** + * Get all stored credentials + */ + export async function getAllCredentials(): Promise> { + return readAuthFile(); + } + + /** + * Get the list of all authenticated provider IDs + */ + export async function getAuthenticatedProviders(): Promise { + const authData = await readAuthFile(); + return Object.keys(authData); + } +} diff --git a/apps/server/src/providers/index.ts b/apps/server/src/providers/index.ts new file mode 100644 index 00000000..a038baa5 --- /dev/null +++ b/apps/server/src/providers/index.ts @@ -0,0 +1,14 @@ +/** + * Provider Abstraction Layer + * Exports auth, registry, and model utilities + */ +export { Auth } from './auth.ts'; +export { Model } from './model.ts'; +export { + PROVIDER_REGISTRY, + PROVIDER_ALIASES, + isProviderSupported, + normalizeProviderId, + getProviderFactory, + getSupportedProviders +} from './registry.ts'; diff --git a/apps/server/src/providers/model.ts b/apps/server/src/providers/model.ts new file mode 100644 index 00000000..4b79237f --- /dev/null +++ b/apps/server/src/providers/model.ts @@ -0,0 +1,124 @@ +/** + * Model Instantiation + * Creates AI SDK models with authentication from OpenCode + */ +import type { LanguageModel } from 'ai'; + +import { Auth } from './auth.ts'; +import { + getProviderFactory, + isProviderSupported, + normalizeProviderId, + type ProviderOptions +} from './registry.ts'; + +export namespace Model { + export class ProviderNotFoundError extends Error { + readonly _tag = 'ProviderNotFoundError'; + readonly providerId: string; + + 
constructor(providerId: string) { + super(`Provider "${providerId}" is not supported`); + this.providerId = providerId; + } + } + + export class ProviderNotAuthenticatedError extends Error { + readonly _tag = 'ProviderNotAuthenticatedError'; + readonly providerId: string; + + constructor(providerId: string) { + super( + `Provider "${providerId}" is not authenticated. Run 'opencode auth login' to authenticate.` + ); + this.providerId = providerId; + } + } + + export type ModelOptions = { + /** Additional provider options */ + providerOptions?: Partial; + /** Skip authentication check (useful for providers with wellknown auth) */ + skipAuth?: boolean; + }; + + /** + * Create an AI SDK model with authentication + * + * @param providerId - The provider ID (e.g., 'anthropic', 'openai') + * @param modelId - The model ID (e.g., 'claude-sonnet-4-20250514', 'gpt-4o') + * @param options - Additional options + * @returns The AI SDK language model + */ + export async function getModel( + providerId: string, + modelId: string, + options: ModelOptions = {} + ): Promise { + const normalizedProviderId = normalizeProviderId(providerId); + + // Check if provider is supported + if (!isProviderSupported(normalizedProviderId)) { + throw new ProviderNotFoundError(providerId); + } + + // Get the provider factory + const factory = getProviderFactory(normalizedProviderId); + if (!factory) { + throw new ProviderNotFoundError(providerId); + } + + // Get authentication + let apiKey: string | undefined; + + if (!options.skipAuth) { + apiKey = await Auth.getApiKey(normalizedProviderId); + + // Special handling for 'opencode' provider - it's a gateway that always works + if (!apiKey && normalizedProviderId !== 'opencode') { + throw new ProviderNotAuthenticatedError(providerId); + } + } + + // Build provider options + const providerOptions: ProviderOptions = { + ...options.providerOptions + }; + + if (apiKey) { + providerOptions.apiKey = apiKey; + } + + // Create the provider and get the model + 
const provider = factory(providerOptions); + const model = provider(modelId); + + return model as LanguageModel; + } + + /** + * Check if a model can be used (provider is supported and authenticated) + */ + export async function canUseModel(providerId: string): Promise { + const normalizedProviderId = normalizeProviderId(providerId); + + if (!isProviderSupported(normalizedProviderId)) { + return false; + } + + // Special case: opencode gateway is always available + if (normalizedProviderId === 'opencode') { + return true; + } + + return Auth.isAuthenticated(normalizedProviderId); + } + + /** + * Get all available providers (supported and authenticated) + */ + export async function getAvailableProviders(): Promise { + const authenticatedProviders = await Auth.getAuthenticatedProviders(); + return authenticatedProviders.filter((provider) => isProviderSupported(provider)); + } +} diff --git a/apps/server/src/providers/opencode.ts b/apps/server/src/providers/opencode.ts new file mode 100644 index 00000000..fb6f14e1 --- /dev/null +++ b/apps/server/src/providers/opencode.ts @@ -0,0 +1,142 @@ +/** + * OpenCode Zen Provider + * Routes to appropriate endpoints based on model type + * + * OpenCode Zen is a gateway that provides access to curated models. 
+ * Each model type uses a specific endpoint and AI SDK: + * - Claude models → `@ai-sdk/anthropic` → https://opencode.ai/zen/v1/messages + * - GPT models → `@ai-sdk/openai` → https://opencode.ai/zen/v1/responses + * - Gemini models → `@ai-sdk/google` → https://opencode.ai/zen/v1/models/{model} + * - Others (GLM, Kimi, Qwen, Big Pickle) → `@ai-sdk/openai-compatible` → https://opencode.ai/zen/v1/chat/completions + */ +import { createAnthropic } from '@ai-sdk/anthropic'; +import { createGoogleGenerativeAI } from '@ai-sdk/google'; +import { createOpenAI } from '@ai-sdk/openai'; +import { createOpenAICompatible } from '@ai-sdk/openai-compatible'; +import type { LanguageModel } from 'ai'; + +// Zen API endpoints for each SDK type +// Note: Each SDK appends its own path segment to baseURL: +// - Anthropic SDK: appends /messages +// - OpenAI SDK responses: appends /responses +// - Google SDK: appends /models/{model} +// - OpenAI-compatible SDK: appends /chat/completions +// So we use the same base URL for all of them +const ZEN_BASE_URL = 'https://opencode.ai/zen/v1'; + +// Model prefixes and their corresponding provider types +const MODEL_ROUTING: Array<{ + prefix: string; + type: 'anthropic' | 'openai' | 'google' | 'compatible'; +}> = [ + { prefix: 'claude-', type: 'anthropic' }, + { prefix: 'gpt-', type: 'openai' }, + { prefix: 'gemini-', type: 'google' }, + // Everything else uses openai-compatible + { prefix: 'glm-', type: 'compatible' }, + { prefix: 'kimi-', type: 'compatible' }, + { prefix: 'qwen', type: 'compatible' }, + { prefix: 'big-pickle', type: 'compatible' } +]; + +/** + * Determine which provider type to use for a model + */ +function getModelType(modelId: string): 'anthropic' | 'openai' | 'google' | 'compatible' { + const lowerId = modelId.toLowerCase(); + + for (const route of MODEL_ROUTING) { + if (lowerId.startsWith(route.prefix)) { + return route.type; + } + } + + // Default to openai-compatible for unknown models + return 'compatible'; +} + +/** + * 
Create an OpenCode Zen provider + */ +export function createOpenCodeZen(options: { apiKey: string }) { + const { apiKey } = options; + + return function opencode(modelId: string): LanguageModel { + const modelType = getModelType(modelId); + + switch (modelType) { + case 'anthropic': { + // Anthropic SDK appends /messages to baseURL + const provider = createAnthropic({ + apiKey, + baseURL: ZEN_BASE_URL + }); + return provider(modelId); + } + + case 'openai': { + // OpenAI SDK .responses() appends /responses to baseURL + const provider = createOpenAI({ + apiKey, + baseURL: ZEN_BASE_URL + }); + return provider.responses(modelId); + } + + case 'google': { + // Google SDK appends /models/{model} to baseURL + const provider = createGoogleGenerativeAI({ + apiKey, + baseURL: ZEN_BASE_URL + }); + return provider(modelId); + } + + case 'compatible': + default: { + // OpenAI-compatible SDK appends /chat/completions to baseURL + const provider = createOpenAICompatible({ + name: 'opencode-zen', + apiKey, + baseURL: ZEN_BASE_URL + }); + return provider.chatModel(modelId); + } + } + }; +} + +/** + * Available OpenCode Zen models + */ +export const OPENCODE_ZEN_MODELS = [ + // GPT models + 'gpt-5.2', + 'gpt-5.2-codex', + 'gpt-5.1', + 'gpt-5.1-codex', + 'gpt-5.1-codex-max', + 'gpt-5.1-codex-mini', + 'gpt-5', + 'gpt-5-codex', + 'gpt-5-nano', + // Claude models + 'claude-sonnet-4-5', + 'claude-sonnet-4', + 'claude-haiku-4-5', + 'claude-3-5-haiku', + 'claude-opus-4-5', + 'claude-opus-4-1', + // Gemini models + 'gemini-3-pro', + 'gemini-3-flash', + // Other models + 'glm-4.7', + 'glm-4.6', + 'kimi-k2', + 'kimi-k2-thinking', + 'qwen3-coder', + 'big-pickle' +] as const; + +export type OpenCodeZenModel = (typeof OPENCODE_ZEN_MODELS)[number]; diff --git a/apps/server/src/providers/registry.ts b/apps/server/src/providers/registry.ts new file mode 100644 index 00000000..489e43bd --- /dev/null +++ b/apps/server/src/providers/registry.ts @@ -0,0 +1,112 @@ +/** + * Provider Registry + * Maps 
provider IDs to their AI SDK factory functions + */ +import { createAmazonBedrock } from '@ai-sdk/amazon-bedrock'; +import { createAnthropic } from '@ai-sdk/anthropic'; +import { createAzure } from '@ai-sdk/azure'; +import { createCerebras } from '@ai-sdk/cerebras'; +import { createCohere } from '@ai-sdk/cohere'; +import { createDeepInfra } from '@ai-sdk/deepinfra'; +import { createGoogleGenerativeAI } from '@ai-sdk/google'; +import { createVertex } from '@ai-sdk/google-vertex'; +import { createGroq } from '@ai-sdk/groq'; +import { createMistral } from '@ai-sdk/mistral'; +import { createOpenAI } from '@ai-sdk/openai'; +import { createOpenAICompatible } from '@ai-sdk/openai-compatible'; +import { createPerplexity } from '@ai-sdk/perplexity'; +import { createTogetherAI } from '@ai-sdk/togetherai'; +import { createXai } from '@ai-sdk/xai'; + +import { createOpenCodeZen } from './opencode.ts'; + +// Type for provider factory options +export type ProviderOptions = { + apiKey?: string; + baseURL?: string; + headers?: Record; + name?: string; // Required for openai-compatible +}; + +// Type for a provider factory function +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export type ProviderFactory = (options?: any) => { + (modelId: string, settings?: Record): unknown; +}; + +// Registry of all supported providers +export const PROVIDER_REGISTRY: Record = { + // OpenCode Zen (curated models gateway) + opencode: createOpenCodeZen as ProviderFactory, + + // Anthropic + anthropic: createAnthropic as ProviderFactory, + + // OpenAI + openai: createOpenAI as ProviderFactory, + + // Google + google: createGoogleGenerativeAI as ProviderFactory, + 'google-vertex': createVertex as ProviderFactory, + + // Amazon + 'amazon-bedrock': createAmazonBedrock as ProviderFactory, + + // Azure + azure: createAzure as ProviderFactory, + + // Other providers + groq: createGroq as ProviderFactory, + mistral: createMistral as ProviderFactory, + xai: createXai as ProviderFactory, + 
cohere: createCohere as ProviderFactory, + deepinfra: createDeepInfra as ProviderFactory, + cerebras: createCerebras as ProviderFactory, + perplexity: createPerplexity as ProviderFactory, + togetherai: createTogetherAI as ProviderFactory, + + // OpenAI-compatible providers (for custom endpoints) + 'openai-compatible': createOpenAICompatible as ProviderFactory +}; + +// Provider aliases for common naming variations +export const PROVIDER_ALIASES: Record = { + claude: 'anthropic', + 'gpt-4': 'openai', + 'gpt-4o': 'openai', + gemini: 'google', + vertex: 'google-vertex', + bedrock: 'amazon-bedrock', + grok: 'xai', + together: 'togetherai' +}; + +/** + * Check if a provider is supported + */ +export function isProviderSupported(providerId: string): boolean { + const normalized = PROVIDER_ALIASES[providerId] || providerId; + return normalized in PROVIDER_REGISTRY; +} + +/** + * Get the normalized provider ID + */ +export function normalizeProviderId(providerId: string): string { + return PROVIDER_ALIASES[providerId] || providerId; +} + +/** + * Get a provider factory by ID + */ +export function getProviderFactory(providerId: string): ProviderFactory | undefined { + const normalized = normalizeProviderId(providerId); + return PROVIDER_REGISTRY[normalized]; +} + +/** + * Get all supported provider IDs + */ +export function getSupportedProviders(): string[] { + return Object.keys(PROVIDER_REGISTRY); +} diff --git a/apps/server/src/stream/service.ts b/apps/server/src/stream/service.ts index e893729d..832b0ca5 100644 --- a/apps/server/src/stream/service.ts +++ b/apps/server/src/stream/service.ts @@ -1,42 +1,17 @@ -import type { OcEvent } from '../agent/types.ts'; import { getErrorMessage, getErrorTag } from '../errors.ts'; import { Metrics } from '../metrics/index.ts'; -import { - StreamingTagStripper, - extractCoreQuestion, - stripUserQuestionFromStart -} from '@btca/shared'; +import { stripUserQuestionFromStart, extractCoreQuestion } from '@btca/shared'; +import type { 
AgentLoop } from '../agent/loop.ts'; import type { BtcaStreamDoneEvent, BtcaStreamErrorEvent, BtcaStreamEvent, BtcaStreamMetaEvent, - BtcaStreamReasoningDeltaEvent, BtcaStreamTextDeltaEvent, BtcaStreamToolUpdatedEvent } from './types.ts'; -type Accumulator = { - partIds: string[]; - partText: Map; - combined: string; -}; - -const makeAccumulator = (): Accumulator => ({ partIds: [], partText: new Map(), combined: '' }); - -const updateAccumulator = (acc: Accumulator, partId: string, nextText: string): string => { - if (!acc.partIds.includes(partId)) acc.partIds.push(partId); - acc.partText.set(partId, nextText); - - const nextCombined = acc.partIds.map((id) => acc.partText.get(id) ?? '').join(''); - const delta = nextCombined.startsWith(acc.combined) - ? nextCombined.slice(acc.combined.length) - : nextCombined; - acc.combined = nextCombined; - return delta; -}; - const toSse = (event: BtcaStreamEvent): string => { // Standard SSE: an event name + JSON payload. return `event: ${event.type}\ndata: ${JSON.stringify(event)}\n\n`; @@ -45,28 +20,11 @@ const toSse = (event: BtcaStreamEvent): string => { export namespace StreamService { export const createSseStream = (args: { meta: BtcaStreamMetaEvent; - eventStream: AsyncIterable; + eventStream: AsyncIterable; question?: string; // Original question - used to filter echoed user message }): ReadableStream => { const encoder = new TextEncoder(); - const text = makeAccumulator(); - const reasoning = makeAccumulator(); - const toolsByCallId = new Map>(); - - let toolUpdates = 0; - let textEvents = 0; - let reasoningEvents = 0; - - // Create streaming tag stripper for filtering history markers - const tagStripper = new StreamingTagStripper(); - - // Extract the core question for stripping echoed user message from final response - const coreQuestion = extractCoreQuestion(args.question); - - // Track total emitted text for accurate final text reconstruction - let emittedText = ''; - const emit = ( controller: 
ReadableStreamDefaultController, event: BtcaStreamEvent @@ -74,6 +32,15 @@ export namespace StreamService { controller.enqueue(encoder.encode(toSse(event))); }; + // Track accumulated text and tool state + let accumulatedText = ''; + const toolsByCallId = new Map>(); + let textEvents = 0; + let toolEvents = 0; + + // Extract the core question for stripping echoed user message from final response + const coreQuestion = extractCoreQuestion(args.question); + return new ReadableStream({ start(controller) { Metrics.info('stream.start', { @@ -87,101 +54,107 @@ export namespace StreamService { (async () => { try { for await (const event of args.eventStream) { - if (event.type === 'message.part.updated') { - const props = event.properties as any; - const part: any = props?.part; - if (!part || typeof part !== 'object') continue; - - // Skip user messages - only stream assistant responses - const messageRole = props?.message?.role ?? props?.role; - if (messageRole === 'user') { - continue; + switch (event.type) { + case 'text-delta': { + textEvents += 1; + accumulatedText += event.text; + + const msg: BtcaStreamTextDeltaEvent = { + type: 'text.delta', + delta: event.text + }; + emit(controller, msg); + break; } - if (part.type === 'text') { - const partId = String(part.id); - const nextText = String(part.text ?? 
''); + case 'tool-call': { + toolEvents += 1; + const callID = `tool-${toolEvents}`; - // Get the raw delta from accumulator - const rawDelta = updateAccumulator(text, partId, nextText); - - if (rawDelta.length > 0) { - // Filter through the streaming tag stripper - const cleanDelta = tagStripper.process(rawDelta); - - if (cleanDelta.length > 0) { - textEvents += 1; - emittedText += cleanDelta; - const msg: BtcaStreamTextDeltaEvent = { - type: 'text.delta', - delta: cleanDelta - }; - emit(controller, msg); + // Store tool call info + toolsByCallId.set(callID, { + callID, + tool: event.toolName, + state: { + status: 'running', + input: event.input } - } - continue; - } - - if (part.type === 'reasoning') { - const partId = String(part.id); - const nextText = String(part.text ?? ''); - const delta = updateAccumulator(reasoning, partId, nextText); - if (delta.length > 0) { - reasoningEvents += 1; - const msg: BtcaStreamReasoningDeltaEvent = { type: 'reasoning.delta', delta }; - emit(controller, msg); - } - continue; - } - - if (part.type === 'tool') { - const callID = String(part.callID); - const tool = String(part.tool); - const state = part.state as any; + }); const update: BtcaStreamToolUpdatedEvent = { type: 'tool.updated', callID, - tool, - state + tool: event.toolName, + state: { + status: 'running', + input: event.input + } }; - toolUpdates += 1; - toolsByCallId.set(callID, { callID, tool, state }); emit(controller, update); - continue; + break; } - } - if (event.type === 'session.idle') { - const tools = Array.from(toolsByCallId.values()); + case 'tool-result': { + // Find the tool call and update its state + for (const [callID, tool] of toolsByCallId) { + if (tool.tool === event.toolName && tool.state?.status === 'running') { + tool.state = { + status: 'completed', + input: tool.state.input, + output: event.output + }; + + const update: BtcaStreamToolUpdatedEvent = { + type: 'tool.updated', + callID, + tool: event.toolName, + state: tool.state + }; + 
emit(controller, update); + break; + } + } + break; + } - // Flush any remaining buffered content from the tag stripper - const flushed = tagStripper.flush(); - if (flushed.length > 0) { - emittedText += flushed; + case 'finish': { + const tools = Array.from(toolsByCallId.values()); + + // Strip the echoed user question from the final text + let finalText = stripUserQuestionFromStart(accumulatedText, coreQuestion); + + Metrics.info('stream.done', { + collectionKey: args.meta.collection.key, + textLength: finalText.length, + toolCount: tools.length, + textEvents, + toolEvents, + finishReason: event.finishReason + }); + + const done: BtcaStreamDoneEvent = { + type: 'done', + text: finalText, + reasoning: '', // We don't have reasoning in the new format + tools + }; + emit(controller, done); + break; } - // Strip the echoed user question from the final text - let finalText = stripUserQuestionFromStart(emittedText, coreQuestion); - - Metrics.info('stream.done', { - collectionKey: args.meta.collection.key, - textLength: finalText.length, - reasoningLength: reasoning.combined.length, - toolCount: tools.length, - toolUpdates, - textEvents, - reasoningEvents - }); - - const done: BtcaStreamDoneEvent = { - type: 'done', - text: finalText, - reasoning: reasoning.combined, - tools - }; - emit(controller, done); - continue; + case 'error': { + Metrics.error('stream.error', { + collectionKey: args.meta.collection.key, + error: Metrics.errorInfo(event.error) + }); + const err: BtcaStreamErrorEvent = { + type: 'error', + tag: getErrorTag(event.error), + message: getErrorMessage(event.error) + }; + emit(controller, err); + break; + } } } } catch (cause) { diff --git a/apps/server/src/stream/types.ts b/apps/server/src/stream/types.ts index 6225d04a..e4aad068 100644 --- a/apps/server/src/stream/types.ts +++ b/apps/server/src/stream/types.ts @@ -30,30 +30,32 @@ export const BtcaStreamReasoningDeltaEventSchema = z.object({ export const BtcaToolStateSchema = 
z.discriminatedUnion('status', [ z.object({ status: z.literal('pending'), - input: z.record(z.unknown()), - raw: z.string() + input: z.unknown(), + raw: z.string().optional() }), z.object({ status: z.literal('running'), - input: z.record(z.unknown()), + input: z.unknown(), title: z.string().optional(), metadata: z.record(z.unknown()).optional(), - time: z.object({ start: z.number() }) + time: z.object({ start: z.number() }).optional() }), z.object({ status: z.literal('completed'), - input: z.record(z.unknown()), + input: z.unknown(), output: z.string(), - title: z.string(), - metadata: z.record(z.unknown()), - time: z.object({ start: z.number(), end: z.number(), compacted: z.number().optional() }) + title: z.string().optional(), + metadata: z.record(z.unknown()).optional(), + time: z + .object({ start: z.number(), end: z.number(), compacted: z.number().optional() }) + .optional() }), z.object({ status: z.literal('error'), - input: z.record(z.unknown()), + input: z.unknown(), error: z.string(), metadata: z.record(z.unknown()).optional(), - time: z.object({ start: z.number(), end: z.number() }) + time: z.object({ start: z.number(), end: z.number() }).optional() }) ]); diff --git a/apps/server/src/tools/glob.ts b/apps/server/src/tools/glob.ts new file mode 100644 index 00000000..4b259e64 --- /dev/null +++ b/apps/server/src/tools/glob.ts @@ -0,0 +1,140 @@ +/** + * Glob Tool + * Fast file pattern matching using ripgrep + */ +import * as fs from 'node:fs/promises'; +import * as path from 'node:path'; +import { z } from 'zod'; + +import { Ripgrep } from './ripgrep.ts'; +import { Sandbox } from './sandbox.ts'; + +export namespace GlobTool { + // Configuration + const MAX_RESULTS = 100; + + // Schema for tool parameters + export const Parameters = z.object({ + pattern: z + .string() + .describe('The glob pattern to match files against (e.g. "**/*.ts", "src/**/*.js")'), + path: z + .string() + .optional() + .describe('The directory to search in. 
Defaults to the collection root.') + }); + + export type ParametersType = z.infer; + + // Result type + export type Result = { + title: string; + output: string; + metadata: { + count: number; + truncated: boolean; + }; + }; + + /** + * Execute the glob tool + */ + export async function execute( + params: ParametersType, + context: { basePath: string } + ): Promise { + const { basePath } = context; + + // Resolve search path within sandbox + const searchPath = params.path ? Sandbox.resolvePath(basePath, params.path) : basePath; + + // Validate the search path exists and is a directory + try { + const stats = await fs.stat(searchPath); + if (!stats.isDirectory()) { + return { + title: params.pattern, + output: `Path is not a directory: ${params.path || '.'}`, + metadata: { + count: 0, + truncated: false + } + }; + } + } catch { + return { + title: params.pattern, + output: `Directory not found: ${params.path || '.'}`, + metadata: { + count: 0, + truncated: false + } + }; + } + + // Collect files matching the pattern + const files: Array<{ path: string; mtime: number }> = []; + let truncated = false; + + for await (const file of Ripgrep.files({ + cwd: searchPath, + glob: [params.pattern], + hidden: true + })) { + if (files.length >= MAX_RESULTS) { + truncated = true; + break; + } + + const fullPath = path.resolve(searchPath, file); + + try { + const stats = await fs.stat(fullPath); + files.push({ + path: fullPath, + mtime: stats.mtime.getTime() + }); + } catch { + // Skip files we can't stat + files.push({ + path: fullPath, + mtime: 0 + }); + } + } + + if (files.length === 0) { + return { + title: params.pattern, + output: 'No files found matching pattern.', + metadata: { + count: 0, + truncated: false + } + }; + } + + // Sort by modification time (most recent first) + files.sort((a, b) => b.mtime - a.mtime); + + // Format output with relative paths + const outputLines = files.map((f) => path.relative(basePath, f.path)); + + // Add truncation notice + if (truncated) 
{ + outputLines.push(''); + outputLines.push( + `[Truncated: Results limited to ${MAX_RESULTS} files. Use a more specific pattern for more targeted results.]` + ); + } + + return { + title: params.pattern, + output: outputLines.join('\n'), + metadata: { + count: files.length, + truncated + } + }; + } +} diff --git a/apps/server/src/tools/grep.ts b/apps/server/src/tools/grep.ts new file mode 100644 index 00000000..ea3d27f5 --- /dev/null +++ b/apps/server/src/tools/grep.ts @@ -0,0 +1,165 @@ +/** + * Grep Tool + * Searches file contents using regular expressions via ripgrep + */ +import * as fs from 'node:fs/promises'; +import * as path from 'node:path'; +import { z } from 'zod'; + +import { Ripgrep } from './ripgrep.ts'; +import { Sandbox } from './sandbox.ts'; + +export namespace GrepTool { + // Configuration + const MAX_RESULTS = 100; + + // Schema for tool parameters + export const Parameters = z.object({ + pattern: z.string().describe('The regex pattern to search for in file contents'), + path: z + .string() + .optional() + .describe('The directory to search in. Defaults to the collection root.'), + include: z + .string() + .optional() + .describe('File pattern to include in the search (e.g. "*.js", "*.{ts,tsx}")') + }); + + export type ParametersType = z.infer; + + // Result type + export type Result = { + title: string; + output: string; + metadata: { + matchCount: number; + fileCount: number; + truncated: boolean; + }; + }; + + /** + * Execute the grep tool + */ + export async function execute( + params: ParametersType, + context: { basePath: string } + ): Promise { + const { basePath } = context; + + // Resolve search path within sandbox + const searchPath = params.path ? 
Sandbox.resolvePath(basePath, params.path) : basePath; + + // Validate the search path exists and is a directory + try { + const stats = await fs.stat(searchPath); + if (!stats.isDirectory()) { + return { + title: params.pattern, + output: `Path is not a directory: ${params.path || '.'}`, + metadata: { + matchCount: 0, + fileCount: 0, + truncated: false + } + }; + } + } catch { + return { + title: params.pattern, + output: `Directory not found: ${params.path || '.'}`, + metadata: { + matchCount: 0, + fileCount: 0, + truncated: false + } + }; + } + + // Run ripgrep search + const results = await Ripgrep.search({ + cwd: searchPath, + pattern: params.pattern, + glob: params.include, + hidden: true, + maxResults: MAX_RESULTS + 1 // Get one extra to check for truncation + }); + + if (results.length === 0) { + return { + title: params.pattern, + output: 'No matches found.', + metadata: { + matchCount: 0, + fileCount: 0, + truncated: false + } + }; + } + + // Check for truncation + const truncated = results.length > MAX_RESULTS; + const displayResults = truncated ? 
results.slice(0, MAX_RESULTS) : results; + + // Sort by modification time (most recent first) + // Get file modification times + const filesWithMtime = await Promise.all( + displayResults.map(async (result) => { + try { + const stats = await fs.stat(result.path); + return { ...result, mtime: stats.mtime.getTime() }; + } catch { + return { ...result, mtime: 0 }; + } + }) + ); + + filesWithMtime.sort((a, b) => b.mtime - a.mtime); + + // Group results by file + const fileGroups = new Map>(); + + for (const result of filesWithMtime) { + const relativePath = path.relative(basePath, result.path); + if (!fileGroups.has(relativePath)) { + fileGroups.set(relativePath, []); + } + fileGroups.get(relativePath)!.push({ + lineNumber: result.lineNumber, + lineText: result.lineText + }); + } + + // Format output + const outputLines: string[] = []; + + for (const [filePath, matches] of fileGroups) { + outputLines.push(`${filePath}:`); + for (const match of matches) { + // Truncate long lines + const lineText = + match.lineText.length > 200 ? match.lineText.substring(0, 200) + '...' : match.lineText; + outputLines.push(` ${match.lineNumber}: ${lineText}`); + } + outputLines.push(''); // Empty line between files + } + + // Add truncation notice + if (truncated) { + outputLines.push( + `[Truncated: Results limited to ${MAX_RESULTS} matches. 
Narrow your search pattern for more specific results.]` + ); + } + + return { + title: params.pattern, + output: outputLines.join('\n').trim(), + metadata: { + matchCount: displayResults.length, + fileCount: fileGroups.size, + truncated + } + }; + } +} diff --git a/apps/server/src/tools/index.ts b/apps/server/src/tools/index.ts new file mode 100644 index 00000000..c7eb9d0c --- /dev/null +++ b/apps/server/src/tools/index.ts @@ -0,0 +1,10 @@ +/** + * Tools Module + * Exports all agent tools and utilities + */ +export { ReadTool } from './read.ts'; +export { GrepTool } from './grep.ts'; +export { GlobTool } from './glob.ts'; +export { ListTool } from './list.ts'; +export { Ripgrep } from './ripgrep.ts'; +export { Sandbox } from './sandbox.ts'; diff --git a/apps/server/src/tools/list.ts b/apps/server/src/tools/list.ts new file mode 100644 index 00000000..ab722f10 --- /dev/null +++ b/apps/server/src/tools/list.ts @@ -0,0 +1,182 @@ +/** + * List Tool + * Lists directory contents with file types + */ +import * as fs from 'node:fs/promises'; +import * as path from 'node:path'; +import { z } from 'zod'; + +import { Sandbox } from './sandbox.ts'; + +export namespace ListTool { + // Schema for tool parameters + export const Parameters = z.object({ + path: z.string().describe('The directory path to list') + }); + + export type ParametersType = z.infer; + + // Entry type + export type Entry = { + name: string; + type: 'file' | 'directory' | 'symlink' | 'other'; + size?: number; + }; + + // Result type + export type Result = { + title: string; + output: string; + metadata: { + entries: Entry[]; + fileCount: number; + directoryCount: number; + }; + }; + + /** + * Execute the list tool + */ + export async function execute( + params: ParametersType, + context: { basePath: string } + ): Promise { + const { basePath } = context; + + // Resolve path within sandbox + const resolvedPath = Sandbox.resolvePath(basePath, params.path); + + // Check if path exists + try { + const stats = 
await fs.stat(resolvedPath); + if (!stats.isDirectory()) { + return { + title: params.path, + output: `Path is not a directory: ${params.path}`, + metadata: { + entries: [], + fileCount: 0, + directoryCount: 0 + } + }; + } + } catch { + return { + title: params.path, + output: `Directory not found: ${params.path}`, + metadata: { + entries: [], + fileCount: 0, + directoryCount: 0 + } + }; + } + + // Read directory contents + const dirents = await fs.readdir(resolvedPath, { withFileTypes: true }); + + // Process entries + const entries: Entry[] = []; + + for (const dirent of dirents) { + let type: Entry['type'] = 'other'; + let size: number | undefined; + + if (dirent.isDirectory()) { + type = 'directory'; + } else if (dirent.isFile()) { + type = 'file'; + try { + const stats = await fs.stat(path.join(resolvedPath, dirent.name)); + size = stats.size; + } catch { + // Ignore stat errors + } + } else if (dirent.isSymbolicLink()) { + type = 'symlink'; + // Try to determine if symlink points to file or directory + try { + const stats = await fs.stat(path.join(resolvedPath, dirent.name)); + if (stats.isDirectory()) { + type = 'directory'; + } else if (stats.isFile()) { + type = 'file'; + size = stats.size; + } + } catch { + // Keep as symlink if we can't resolve + type = 'symlink'; + } + } + + entries.push({ + name: dirent.name, + type, + size + }); + } + + // Sort: directories first, then files, alphabetically within each group + entries.sort((a, b) => { + if (a.type === 'directory' && b.type !== 'directory') return -1; + if (a.type !== 'directory' && b.type === 'directory') return 1; + return a.name.localeCompare(b.name); + }); + + // Count files and directories + const fileCount = entries.filter((e) => e.type === 'file').length; + const directoryCount = entries.filter((e) => e.type === 'directory').length; + + // Format output + const outputLines: string[] = []; + + for (const entry of entries) { + let line: string; + + if (entry.type === 'directory') { + line = `[DIR] 
${entry.name}/`; + } else if (entry.type === 'symlink') { + line = `[LNK] ${entry.name}`; + } else if (entry.type === 'file') { + const sizeStr = entry.size !== undefined ? formatSize(entry.size) : ''; + line = `[FILE] ${entry.name}${sizeStr ? ` (${sizeStr})` : ''}`; + } else { + line = `[???] ${entry.name}`; + } + + outputLines.push(line); + } + + // Add summary + outputLines.push(''); + outputLines.push( + `Total: ${entries.length} items (${directoryCount} directories, ${fileCount} files)` + ); + + return { + title: params.path, + output: outputLines.join('\n'), + metadata: { + entries, + fileCount, + directoryCount + } + }; + } + + /** + * Format file size in human-readable format + */ + function formatSize(bytes: number): string { + const units = ['B', 'KB', 'MB', 'GB']; + let size = bytes; + let unitIndex = 0; + + while (size >= 1024 && unitIndex < units.length - 1) { + size /= 1024; + unitIndex++; + } + + return `${size.toFixed(unitIndex === 0 ? 0 : 1)} ${units[unitIndex]}`; + } +} diff --git a/apps/server/src/tools/read.ts b/apps/server/src/tools/read.ts new file mode 100644 index 00000000..e61e352f --- /dev/null +++ b/apps/server/src/tools/read.ts @@ -0,0 +1,255 @@ +/** + * Read Tool + * Reads file contents with line numbers, truncation, and special file handling + */ +import * as fs from 'node:fs/promises'; +import * as path from 'node:path'; +import { z } from 'zod'; + +import { Sandbox } from './sandbox.ts'; + +export namespace ReadTool { + // Configuration + const MAX_LINES = 2000; + const MAX_BYTES = 50 * 1024; // 50KB + const MAX_LINE_LENGTH = 2000; + + // Schema for tool parameters + export const Parameters = z.object({ + path: z.string().describe('The absolute path to the file to read'), + offset: z.coerce + .number() + .optional() + .describe('The line number to start reading from (0-based)'), + limit: z.coerce.number().optional().describe('The number of lines to read (defaults to 2000)') + }); + + export type ParametersType = z.infer; + + // 
Result type + export type Result = { + title: string; + output: string; + metadata: { + lines: number; + truncated: boolean; + truncatedByLines?: boolean; + truncatedByBytes?: boolean; + isImage?: boolean; + isPdf?: boolean; + isBinary?: boolean; + }; + // For images/PDFs, we return attachments + attachments?: Array<{ + type: 'file'; + mime: string; + data: string; // base64 + }>; + }; + + // Image extensions + const IMAGE_EXTENSIONS = new Set([ + '.png', + '.jpg', + '.jpeg', + '.gif', + '.webp', + '.bmp', + '.ico', + '.svg' + ]); + + // PDF extension + const PDF_EXTENSIONS = new Set(['.pdf']); + + /** + * Check if a file is binary by looking for null bytes + */ + async function isBinaryFile(filepath: string): Promise { + const file = Bun.file(filepath); + const chunk = await file.slice(0, 8192).arrayBuffer(); + const bytes = new Uint8Array(chunk); + + for (const byte of bytes) { + if (byte === 0) { + return true; + } + } + + return false; + } + + /** + * Execute the read tool + */ + export async function execute( + params: ParametersType, + context: { basePath: string } + ): Promise { + const { basePath } = context; + + // Validate and resolve path within sandbox + const resolvedPath = await Sandbox.resolvePathWithSymlinks(basePath, params.path); + + // Check if file exists + const file = Bun.file(resolvedPath); + if (!(await file.exists())) { + // Try to provide suggestions + const dir = path.dirname(resolvedPath); + const filename = path.basename(resolvedPath); + let suggestions: string[] = []; + + try { + const files = await fs.readdir(dir); + suggestions = files + .filter((f) => f.toLowerCase().includes(filename.toLowerCase().slice(0, 3))) + .slice(0, 5); + } catch { + // Directory doesn't exist + } + + const suggestionText = + suggestions.length > 0 + ? 
`\nDid you mean:\n${suggestions.map((s) => ` - ${s}`).join('\n')}` + : ''; + + return { + title: params.path, + output: `File not found: ${params.path}${suggestionText}`, + metadata: { + lines: 0, + truncated: false + } + }; + } + + const ext = path.extname(resolvedPath).toLowerCase(); + + // Handle images + if (IMAGE_EXTENSIONS.has(ext)) { + const bytes = await file.arrayBuffer(); + const base64 = Buffer.from(bytes).toString('base64'); + const mime = file.type || 'application/octet-stream'; + + return { + title: params.path, + output: `[Image file: ${path.basename(resolvedPath)}]`, + metadata: { + lines: 0, + truncated: false, + isImage: true + }, + attachments: [ + { + type: 'file', + mime, + data: base64 + } + ] + }; + } + + // Handle PDFs + if (PDF_EXTENSIONS.has(ext)) { + const bytes = await file.arrayBuffer(); + const base64 = Buffer.from(bytes).toString('base64'); + + return { + title: params.path, + output: `[PDF file: ${path.basename(resolvedPath)}]`, + metadata: { + lines: 0, + truncated: false, + isPdf: true + }, + attachments: [ + { + type: 'file', + mime: 'application/pdf', + data: base64 + } + ] + }; + } + + // Check for binary files + if (await isBinaryFile(resolvedPath)) { + return { + title: params.path, + output: `[Binary file: ${path.basename(resolvedPath)}]`, + metadata: { + lines: 0, + truncated: false, + isBinary: true + } + }; + } + + // Read text file + const text = await file.text(); + const allLines = text.split('\n'); + + const offset = params.offset ?? 0; + const limit = params.limit ?? MAX_LINES; + + // Apply truncation + let truncatedByLines = false; + let truncatedByBytes = false; + + const outputLines: string[] = []; + let totalBytes = 0; + + const endLine = Math.min(allLines.length, offset + limit); + + for (let i = offset; i < endLine; i++) { + let line = allLines[i] ?? 
''; + + // Truncate long lines + if (line.length > MAX_LINE_LENGTH) { + line = line.substring(0, MAX_LINE_LENGTH) + '...'; + } + + const lineBytes = Buffer.byteLength(line, 'utf8'); + + if (totalBytes + lineBytes > MAX_BYTES) { + truncatedByBytes = true; + break; + } + + outputLines.push(line); + totalBytes += lineBytes; + } + + if (outputLines.length < endLine - offset || endLine < allLines.length) { + truncatedByLines = !truncatedByBytes && outputLines.length >= limit; + } + + // Format output with line numbers + const formattedOutput = outputLines + .map((line, index) => { + const lineNum = (index + offset + 1).toString().padStart(5, ' '); + return `${lineNum}\t${line}`; + }) + .join('\n'); + + // Build truncation message + let truncationMessage = ''; + if (truncatedByBytes || truncatedByLines) { + const remaining = allLines.length - offset - outputLines.length; + if (remaining > 0) { + truncationMessage = `\n\n[Truncated: ${remaining} more lines. Use offset=${offset + outputLines.length} to continue reading.]`; + } + } + + return { + title: params.path, + output: formattedOutput + truncationMessage, + metadata: { + lines: outputLines.length, + truncated: truncatedByBytes || truncatedByLines, + truncatedByLines, + truncatedByBytes + } + }; + } +} diff --git a/apps/server/src/tools/ripgrep.ts b/apps/server/src/tools/ripgrep.ts new file mode 100644 index 00000000..3273d109 --- /dev/null +++ b/apps/server/src/tools/ripgrep.ts @@ -0,0 +1,348 @@ +/** + * Ripgrep Binary Management + * Handles downloading and caching the ripgrep binary + */ +import * as fs from 'node:fs/promises'; +import * as os from 'node:os'; +import * as path from 'node:path'; + +export namespace Ripgrep { + const VERSION = '14.1.1'; + + // Platform configurations + const PLATFORM_CONFIG: Record< + string, + { platform: string; extension: 'tar.gz' | 'zip'; binaryName: string } + > = { + 'darwin-arm64': { + platform: 'aarch64-apple-darwin', + extension: 'tar.gz', + binaryName: 'rg' + }, + 
'darwin-x64': { + platform: 'x86_64-apple-darwin', + extension: 'tar.gz', + binaryName: 'rg' + }, + 'linux-arm64': { + platform: 'aarch64-unknown-linux-gnu', + extension: 'tar.gz', + binaryName: 'rg' + }, + 'linux-x64': { + platform: 'x86_64-unknown-linux-musl', + extension: 'tar.gz', + binaryName: 'rg' + }, + 'win32-x64': { + platform: 'x86_64-pc-windows-msvc', + extension: 'zip', + binaryName: 'rg.exe' + } + }; + + /** + * Get the btca data directory + */ + function getDataPath(): string { + const platform = os.platform(); + + if (platform === 'win32') { + const appdata = process.env.APPDATA || path.join(os.homedir(), 'AppData', 'Roaming'); + return path.join(appdata, 'btca'); + } + + // Linux and macOS use XDG_DATA_HOME or ~/.local/share + const xdgData = process.env.XDG_DATA_HOME || path.join(os.homedir(), '.local', 'share'); + return path.join(xdgData, 'btca'); + } + + /** + * Get the bin directory for storing binaries + */ + function getBinPath(): string { + return path.join(getDataPath(), 'bin'); + } + + /** + * Get the expected ripgrep binary path + */ + function getRipgrepPath(): string { + const platform = os.platform(); + const binaryName = platform === 'win32' ? 
'rg.exe' : 'rg'; + return path.join(getBinPath(), binaryName); + } + + /** + * Check if ripgrep is already installed in PATH + */ + async function findInPath(): Promise<string | null> { + const rgPath = Bun.which('rg'); + return rgPath || null; + } + + /** + * Check if our cached ripgrep binary exists + */ + async function findCached(): Promise<string | null> { + const rgPath = getRipgrepPath(); + const file = Bun.file(rgPath); + if (await file.exists()) { + return rgPath; + } + return null; + } + + /** + * Get the platform configuration + */ + function getPlatformConfig(): (typeof PLATFORM_CONFIG)[string] | null { + const platform = os.platform(); + const arch = os.arch(); + const key = `${platform}-${arch}`; + return PLATFORM_CONFIG[key] || null; + } + + /** + * Download ripgrep from GitHub releases + */ + async function download(): Promise<string> { + const config = getPlatformConfig(); + if (!config) { + throw new Error(`Unsupported platform: ${os.platform()}-${os.arch()}`); + } + + const binDir = getBinPath(); + const rgPath = getRipgrepPath(); + + // Ensure bin directory exists + await fs.mkdir(binDir, { recursive: true }); + + // Build download URL + const filename = `ripgrep-${VERSION}-${config.platform}.${config.extension}`; + const url = `https://github.com/BurntSushi/ripgrep/releases/download/${VERSION}/${filename}`; + + console.log(`Downloading ripgrep from ${url}...`); + + // Download the archive + const response = await fetch(url); + if (!response.ok) { + throw new Error(`Failed to download ripgrep: ${response.status} ${response.statusText}`); + } + + const buffer = await response.arrayBuffer(); + const archivePath = path.join(binDir, filename); + + // Write archive to disk + await Bun.write(archivePath, buffer); + + // Extract based on file type + if (config.extension === 'tar.gz') { + // Extract tar.gz + const proc = Bun.spawn(['tar', '-xzf', archivePath, '--strip-components=1', '-C', binDir], { + cwd: binDir, + stdout: 'pipe', + stderr: 'pipe' + }); + await proc.exited; + + if 
(proc.exitCode !== 0) { + throw new Error(`Failed to extract ripgrep: exit code ${proc.exitCode}`); + } + } else { + // Extract zip (Windows) + // Use unzip if available, otherwise use Bun's built-in zip handling + const proc = Bun.spawn(['unzip', '-o', archivePath, '-d', binDir], { + cwd: binDir, + stdout: 'pipe', + stderr: 'pipe' + }); + await proc.exited; + + if (proc.exitCode !== 0) { + throw new Error(`Failed to extract ripgrep: exit code ${proc.exitCode}`); + } + } + + // Clean up archive + await fs.unlink(archivePath).catch(() => {}); + + // Make binary executable (Unix only) + if (os.platform() !== 'win32') { + await fs.chmod(rgPath, 0o755); + } + + console.log(`Ripgrep installed to ${rgPath}`); + + return rgPath; + } + + /** + * Get the path to the ripgrep binary + * Downloads it if not found in PATH or cache + */ + export async function filepath(): Promise { + // First check PATH + const inPath = await findInPath(); + if (inPath) { + return inPath; + } + + // Then check cache + const cached = await findCached(); + if (cached) { + return cached; + } + + // Download if not found + return download(); + } + + /** + * Run ripgrep with the given arguments + */ + export async function run( + args: string[], + options: { cwd?: string } = {} + ): Promise<{ + stdout: string; + stderr: string; + exitCode: number; + }> { + const rgPath = await filepath(); + + const proc = Bun.spawn([rgPath, ...args], { + cwd: options.cwd || process.cwd(), + stdout: 'pipe', + stderr: 'pipe' + }); + + const [stdout, stderr, exitCode] = await Promise.all([ + new Response(proc.stdout).text(), + new Response(proc.stderr).text(), + proc.exited + ]); + + return { stdout, stderr, exitCode }; + } + + /** + * Generator that yields file paths matching a glob pattern + */ + export async function* files(options: { + cwd: string; + glob?: string[]; + hidden?: boolean; + }): AsyncGenerator { + const rgPath = await filepath(); + + const args = ['--files', '--follow', '--no-messages']; + + if 
(options.hidden) { + args.push('--hidden'); + } + + if (options.glob) { + for (const pattern of options.glob) { + args.push('--glob', pattern); + } + } + + const proc = Bun.spawn([rgPath, ...args], { + cwd: options.cwd, + stdout: 'pipe', + stderr: 'pipe' + }); + + const stdout = await new Response(proc.stdout).text(); + await proc.exited; + + for (const line of stdout.trim().split('\n')) { + if (line) { + yield line; + } + } + } + + /** + * Search for a pattern in files + */ + export async function search(options: { + cwd: string; + pattern: string; + glob?: string; + hidden?: boolean; + maxResults?: number; + }): Promise< + Array<{ + path: string; + lineNumber: number; + lineText: string; + }> + > { + const rgPath = await filepath(); + + const args = [ + '-n', // line numbers + '-H', // filename + '--follow', // follow symlinks + '--no-messages', // suppress errors + '--field-match-separator=|' // use | as separator + ]; + + if (options.hidden) { + args.push('--hidden'); + } + + if (options.glob) { + args.push('--glob', options.glob); + } + + args.push('--regexp', options.pattern); + + const proc = Bun.spawn([rgPath, ...args], { + cwd: options.cwd, + stdout: 'pipe', + stderr: 'pipe' + }); + + const stdout = await new Response(proc.stdout).text(); + await proc.exited; + + const results: Array<{ + path: string; + lineNumber: number; + lineText: string; + }> = []; + + for (const line of stdout.trim().split('\n')) { + if (!line) continue; + + // Parse format: filepath|lineNum|lineText + const firstPipe = line.indexOf('|'); + if (firstPipe === -1) continue; + + const secondPipe = line.indexOf('|', firstPipe + 1); + if (secondPipe === -1) continue; + + const filePath = line.substring(0, firstPipe); + const lineNumStr = line.substring(firstPipe + 1, secondPipe); + const lineText = line.substring(secondPipe + 1); + + const lineNumber = parseInt(lineNumStr, 10); + if (isNaN(lineNumber)) continue; + + results.push({ + path: path.resolve(options.cwd, filePath), + lineNumber, 
+ lineText + }); + + if (options.maxResults && results.length >= options.maxResults) { + break; + } + } + + return results; + } +} diff --git a/apps/server/src/tools/sandbox.ts b/apps/server/src/tools/sandbox.ts new file mode 100644 index 00000000..ba137b17 --- /dev/null +++ b/apps/server/src/tools/sandbox.ts @@ -0,0 +1,164 @@ +/** + * Path Sandboxing Utilities + * Ensures all file operations stay within the collections directory + */ +import * as fs from 'node:fs/promises'; +import * as path from 'node:path'; + +export namespace Sandbox { + export class PathEscapeError extends Error { + readonly _tag = 'PathEscapeError'; + readonly requestedPath: string; + readonly basePath: string; + + constructor(requestedPath: string, basePath: string) { + super( + `Path "${requestedPath}" is outside the allowed directory "${basePath}". Access denied.` + ); + this.requestedPath = requestedPath; + this.basePath = basePath; + } + } + + export class PathNotFoundError extends Error { + readonly _tag = 'PathNotFoundError'; + readonly requestedPath: string; + + constructor(requestedPath: string) { + super(`Path "${requestedPath}" does not exist.`); + this.requestedPath = requestedPath; + } + } + + /** + * Resolve a path relative to the base path and validate it stays within bounds + * + * @param basePath - The allowed base directory (collections path) + * @param requestedPath - The path requested by the user/agent + * @returns The resolved absolute path + * @throws PathEscapeError if the path would escape the base directory + */ + export function resolvePath(basePath: string, requestedPath: string): string { + // Normalize the base path + const normalizedBase = path.resolve(basePath); + + // Resolve the requested path relative to the base + let resolved: string; + if (path.isAbsolute(requestedPath)) { + resolved = path.resolve(requestedPath); + } else { + resolved = path.resolve(normalizedBase, requestedPath); + } + + // Normalize to remove any .. or . 
components + resolved = path.normalize(resolved); + + // Check that the resolved path starts with the base path + // We need to ensure the path is either exactly the base or within it + const relative = path.relative(normalizedBase, resolved); + + // If the relative path starts with '..' or is absolute, it's outside the base + if (relative.startsWith('..') || path.isAbsolute(relative)) { + throw new PathEscapeError(requestedPath, basePath); + } + + return resolved; + } + + /** + * Resolve a path and follow symlinks, validating both the path and its target + * + * @param basePath - The allowed base directory (collections path) + * @param requestedPath - The path requested by the user/agent + * @returns The resolved real path (after following symlinks) + * @throws PathEscapeError if the path or symlink target would escape the base directory + */ + export async function resolvePathWithSymlinks( + basePath: string, + requestedPath: string + ): Promise { + // First validate the path itself + const resolved = resolvePath(basePath, requestedPath); + + try { + // Get the real path (follows symlinks) + const realPath = await fs.realpath(resolved); + + // For symlinks pointing outside, we allow it since the collection + // symlinks resources from various locations. The sandbox is about + // what the agent can ACCESS through the collection, not where the + // actual files live. + // + // The key security boundary is that: + // 1. The agent can only request paths within the collection directory + // 2. Those paths may be symlinks to actual resource locations + // 3. 
This is intentional - the collection IS the set of accessible resources + + return realPath; + } catch (error) { + // If realpath fails, the file doesn't exist + // Return the resolved path anyway for error messages + return resolved; + } + } + + /** + * Check if a path exists and is within the sandbox + */ + export async function exists(basePath: string, requestedPath: string): Promise { + try { + const resolved = resolvePath(basePath, requestedPath); + const file = Bun.file(resolved); + return await file.exists(); + } catch { + return false; + } + } + + /** + * Check if a path is a directory + */ + export async function isDirectory(basePath: string, requestedPath: string): Promise { + try { + const resolved = resolvePath(basePath, requestedPath); + const stats = await fs.stat(resolved); + return stats.isDirectory(); + } catch { + return false; + } + } + + /** + * Check if a path is a file + */ + export async function isFile(basePath: string, requestedPath: string): Promise { + try { + const resolved = resolvePath(basePath, requestedPath); + const stats = await fs.stat(resolved); + return stats.isFile(); + } catch { + return false; + } + } + + /** + * Validate a path exists and is within sandbox, throwing if not + */ + export async function validatePath(basePath: string, requestedPath: string): Promise { + const resolved = resolvePath(basePath, requestedPath); + + const file = Bun.file(resolved); + if (!(await file.exists())) { + throw new PathNotFoundError(requestedPath); + } + + return resolved; + } + + /** + * Get the relative path from base to the resolved path + */ + export function getRelativePath(basePath: string, resolvedPath: string): string { + return path.relative(basePath, resolvedPath); + } +} diff --git a/apps/server/test-phase1.ts b/apps/server/test-phase1.ts new file mode 100644 index 00000000..f3b7d19f --- /dev/null +++ b/apps/server/test-phase1.ts @@ -0,0 +1,139 @@ +#!/usr/bin/env bun +/** + * Manual test script for Phase 1+2 implementation + * 
Run with: bun apps/server/test-phase1.ts + */ + +import { Auth } from './src/providers/auth.ts'; +import { getSupportedProviders } from './src/providers/registry.ts'; +import { Model } from './src/providers/model.ts'; +import { Ripgrep } from './src/tools/ripgrep.ts'; +import { ReadTool, GrepTool, GlobTool, ListTool } from './src/tools/index.ts'; +import { AgentLoop } from './src/agent/loop.ts'; +import * as fs from 'node:fs/promises'; +import * as path from 'node:path'; +import * as os from 'node:os'; + +async function main() { + console.log('=== Phase 1+2 Test Script ===\n'); + + // 1. Test Auth + console.log('1. Testing Auth...'); + const providers = await Auth.getAuthenticatedProviders(); + console.log(` Authenticated providers: ${providers.join(', ') || 'none'}`); + + if (providers.length === 0) { + console.log(' ❌ No providers authenticated. Run `opencode auth` first.'); + process.exit(1); + } + console.log(' ✅ Auth working\n'); + + // 2. Test Provider Registry + console.log('2. Testing Provider Registry...'); + const supportedProviders = getSupportedProviders(); + console.log( + ` Supported providers: ${supportedProviders.slice(0, 5).join(', ')}... (${supportedProviders.length} total)` + ); + console.log(' ✅ Registry working\n'); + + // 3. Test Ripgrep + console.log('3. Testing Ripgrep...'); + const rgPath = await Ripgrep.filepath(); + console.log(` Ripgrep path: ${rgPath}`); + const rgResult = await Ripgrep.run(['--version']); + console.log(` Version: ${rgResult.stdout.trim().split('\n')[0]}`); + console.log(' ✅ Ripgrep working\n'); + + // 4. Test Tools with a temp directory + console.log('4. 
Testing Tools...'); + const testDir = await fs.mkdtemp(path.join(os.tmpdir(), 'btca-test-')); + + // Create test files + await fs.writeFile(path.join(testDir, 'hello.txt'), 'Hello, World!\nThis is a test file.'); + await fs.mkdir(path.join(testDir, 'subdir')); + await fs.writeFile(path.join(testDir, 'subdir', 'nested.ts'), 'export const foo = "bar";'); + + // Test list + const listResult = await ListTool.execute({ path: '.' }, { basePath: testDir }); + console.log(` List result: ${listResult.output.split('\n').length} entries`); + + // Test read + const readResult = await ReadTool.execute({ path: 'hello.txt' }, { basePath: testDir }); + console.log( + ` Read result: ${readResult.output.includes('Hello, World!') ? 'content matches' : 'MISMATCH'}` + ); + + // Test glob + const globResult = await GlobTool.execute({ pattern: '**/*.ts' }, { basePath: testDir }); + console.log( + ` Glob result: ${globResult.output.includes('nested.ts') ? 'found .ts file' : 'NOT FOUND'}` + ); + + // Test grep + const grepResult = await GrepTool.execute({ pattern: 'foo' }, { basePath: testDir }); + console.log( + ` Grep result: ${grepResult.output.includes('nested.ts') ? 'found match' : 'NOT FOUND'}` + ); + + // Cleanup + await fs.rm(testDir, { recursive: true }); + console.log(' ✅ All tools working\n'); + + // 5. Test Model Creation (without calling API) + console.log('5. Testing Model Creation...'); + const firstProvider = providers[0]!; + // Use appropriate model ID based on provider + // For opencode, use big-pickle (free model) or claude-sonnet-4-5 + const testModelId = firstProvider === 'opencode' ? 
'big-pickle' : 'claude-sonnet-4-20250514'; + try { + // Just test that we can create a model - don't actually call it + const model = await Model.getModel(firstProvider, testModelId); + console.log(` Created model for ${firstProvider}/${testModelId}`); + console.log(' ✅ Model creation working\n'); + } catch (e) { + console.log(` ⚠️ Could not create model for ${firstProvider}: ${e}`); + console.log(' (This might be expected if provider uses non-standard model IDs)\n'); + } + + // 6. Optional: Full Agent Loop Test (requires API call) + const runFullTest = process.argv.includes('--full'); + if (runFullTest) { + console.log('6. Testing Full Agent Loop (API call)...'); + const agentTestDir = await fs.mkdtemp(path.join(os.tmpdir(), 'btca-agent-')); + await fs.writeFile( + path.join(agentTestDir, 'README.md'), + '# Test Project\n\nThis project contains the secret code: ALPHA-123.' + ); + // Use appropriate model ID based on provider + // For opencode, use big-pickle (free) - uses openai-compatible endpoint + const agentModelId = firstProvider === 'opencode' ? 'big-pickle' : 'claude-sonnet-4-20250514'; + try { + const result = await AgentLoop.run({ + providerId: firstProvider, + modelId: agentModelId, + collectionPath: agentTestDir, + agentInstructions: 'This is a test collection.', + question: 'What is the secret code mentioned in the README?', + maxSteps: 5 + }); + + console.log(` Answer: ${result.answer.substring(0, 100)}...`); + console.log(` Events: ${result.events.length}`); + console.log(` Model: ${result.model.provider}/${result.model.model}`); + console.log(' ✅ Agent loop working\n'); + } catch (e) { + console.log(` ❌ Agent loop failed: ${e}\n`); + } + + await fs.rm(agentTestDir, { recursive: true }); + } else { + console.log('6. Skipping full agent loop test (run with --full to enable)\n'); + } + + console.log('=== All Phase 1+2 tests passed! 
==='); +} + +main().catch((e) => { + console.error('Test failed:', e); + process.exit(1); +}); diff --git a/apps/web/CLERK_API_KEYS.md b/apps/web/CLERK_API_KEYS.md new file mode 100644 index 00000000..8cf0c0b5 --- /dev/null +++ b/apps/web/CLERK_API_KEYS.md @@ -0,0 +1,678 @@ +# Clerk API Keys Migration Plan + +This document outlines the migration from custom API key management to Clerk's API keys feature. + +## Overview + +**Current state:** Custom `apiKeys` table in Convex with SHA-256 hashing, custom UI for management +**Target state:** Clerk manages API key lifecycle, we track usage in a lightweight table + +## Prerequisites + +1. Enable API keys in Clerk Dashboard: + - Navigate to https://dashboard.clerk.com/~/platform/api-keys + - Select "Enable API keys" + - Enable "User API keys" (not organization keys) + +2. Add `@clerk/backend` dependency: + ```bash + cd apps/web && bun add @clerk/backend + ``` + +--- + +## Phase 1: Add New Schema and Verification Helper + +### 1.1 Update Schema + +**File:** `src/convex/schema.ts` + +Add new `apiKeyUsage` table for tracking usage (the old `apiKeys` table will be removed in Phase 4): + +```typescript +apiKeyUsage: defineTable({ + clerkApiKeyId: v.string(), // "ak_xxx" from Clerk + clerkUserId: v.string(), // "user_xxx" - the subject from Clerk + instanceId: v.id('instances'), + name: v.optional(v.string()), // Cached name for display + lastUsedAt: v.optional(v.number()), + usageCount: v.number(), + createdAt: v.number() +}) + .index('by_clerk_api_key_id', ['clerkApiKeyId']) + .index('by_instance', ['instanceId']); +``` + +### 1.2 Create Clerk Verification Helper + +**File:** `src/convex/clerkApiKeys.ts` (new file) + +```typescript +'use node'; + +import { createClerkClient } from '@clerk/backend'; +import { v } from 'convex/values'; + +import { internal } from './_generated/api'; +import type { Id } from './_generated/dataModel'; +import { action, internalMutation, internalQuery } from './_generated/server'; +import { 
AnalyticsEvents } from './analyticsEvents'; + +const getClerkClient = () => { + const secretKey = process.env.CLERK_SECRET_KEY; + if (!secretKey) { + throw new Error('CLERK_SECRET_KEY environment variable is not set'); + } + return createClerkClient({ secretKey }); +}; + +// ───────────────────────────────────────────────────────────────────────────── +// Types +// ───────────────────────────────────────────────────────────────────────────── + +export type ApiKeyValidationResult = + | { + valid: true; + clerkApiKeyId: string; + clerkUserId: string; + instanceId: Id<'instances'>; + } + | { + valid: false; + error: string; + }; + +// ───────────────────────────────────────────────────────────────────────────── +// Actions (public API) +// ───────────────────────────────────────────────────────────────────────────── + +/** + * Validate an API key using Clerk and return the associated instance. + * This is the main entry point for API key validation. + */ +export const validate = action({ + args: { apiKey: v.string() }, + handler: async (ctx, args): Promise => { + const { apiKey } = args; + + // Verify with Clerk + let clerkResult: { id: string; subject: string; name: string | null }; + try { + const clerkClient = getClerkClient(); + clerkResult = await clerkClient.apiKeys.verify(apiKey); + } catch (error) { + // Clerk throws on invalid/revoked/expired keys + const message = error instanceof Error ? 
error.message : 'Invalid API key'; + return { valid: false, error: message }; + } + + const clerkApiKeyId = clerkResult.id; + const clerkUserId = clerkResult.subject; + + // Get instance by Clerk user ID (using internal query since we don't have auth context) + const instance = await ctx.runQuery(internal['instances/queries'].getByClerkIdInternal, { + clerkId: clerkUserId + }); + + if (!instance) { + return { valid: false, error: 'No instance found for this user' }; + } + + // Track usage + await ctx.runMutation(internal.clerkApiKeys.touchUsage, { + clerkApiKeyId, + clerkUserId, + instanceId: instance._id, + name: clerkResult.name ?? undefined + }); + + // Track analytics asynchronously + await ctx.scheduler.runAfter(0, internal.analytics.trackEvent, { + distinctId: clerkUserId, + event: AnalyticsEvents.API_KEY_USED, + properties: { + instanceId: instance._id, + apiKeyId: clerkApiKeyId + } + }); + + return { + valid: true, + clerkApiKeyId, + clerkUserId, + instanceId: instance._id + }; + } +}); + +// ───────────────────────────────────────────────────────────────────────────── +// Internal Mutations +// ───────────────────────────────────────────────────────────────────────────── + +/** + * Update usage tracking for an API key. + * Creates a new record if this is the first use. 
+ */ +export const touchUsage = internalMutation({ + args: { + clerkApiKeyId: v.string(), + clerkUserId: v.string(), + instanceId: v.id('instances'), + name: v.optional(v.string()) + }, + handler: async (ctx, args) => { + const { clerkApiKeyId, clerkUserId, instanceId, name } = args; + + const existing = await ctx.db + .query('apiKeyUsage') + .withIndex('by_clerk_api_key_id', (q) => q.eq('clerkApiKeyId', clerkApiKeyId)) + .first(); + + if (existing) { + await ctx.db.patch(existing._id, { + lastUsedAt: Date.now(), + usageCount: existing.usageCount + 1, + ...(name && { name }) + }); + } else { + await ctx.db.insert('apiKeyUsage', { + clerkApiKeyId, + clerkUserId, + instanceId, + name, + lastUsedAt: Date.now(), + usageCount: 1, + createdAt: Date.now() + }); + } + } +}); + +// ───────────────────────────────────────────────────────────────────────────── +// Internal Queries +// ───────────────────────────────────────────────────────────────────────────── + +/** + * List usage stats for an instance's API keys. + * Used by the UI to show usage information alongside Clerk's key list. 
+ */ +export const listUsageByInstance = internalQuery({ + args: { instanceId: v.id('instances') }, + handler: async (ctx, args) => { + return await ctx.db + .query('apiKeyUsage') + .withIndex('by_instance', (q) => q.eq('instanceId', args.instanceId)) + .collect(); + } +}); +``` + +### 1.3 Add Internal Query for Instance Lookup + +**File:** `src/convex/instances/queries.ts` + +Add a new internal query to look up instances by Clerk ID (needed because the existing `getByClerkId` requires auth context, but we're validating via API key): + +```typescript +/** + * Internal query to get instance by Clerk ID + * Used by API key validation when we have the Clerk user ID but no auth context + */ +export const getByClerkIdInternal = internalQuery({ + args: { clerkId: v.string() }, + handler: async (ctx, args) => { + return await ctx.db + .query('instances') + .withIndex('by_clerk_id', (q) => q.eq('clerkId', args.clerkId)) + .first(); + } +}); +``` + +This will be accessible via `instances.internalQueries.getByClerkIdInternal` through the `apiHelpers.ts` re-exports. 
+ +--- + +## Phase 2: Update CLI and MCP Actions + +### 2.1 Update CLI Actions + +**File:** `src/convex/cli.ts` + +Replace all `api.apiKeys.validate` calls with `api.clerkApiKeys.validate`: + +**Before:** + +```typescript +// Validate API key +const validation = await ctx.runQuery(api.apiKeys.validate, { apiKey }); +if (!validation.valid) { + return { ok: false, error: validation.error }; +} +const instanceId = validation.userId; +``` + +**After:** + +```typescript +// Validate API key with Clerk +const validation = await ctx.runAction(api.clerkApiKeys.validate, { apiKey }); +if (!validation.valid) { + return { ok: false, error: validation.error }; +} +const instanceId = validation.instanceId; +``` + +Functions to update in `cli.ts`: + +- `getInstanceStatus` (line ~49) +- `wakeInstance` (line ~106) +- `listProjects` (line ~164) +- `listThreads` (line ~202) +- `getThread` (line ~251) +- `listQuestions` (line ~298) + +### 2.2 Update MCP Actions + +**File:** `src/convex/mcp.ts` + +Same pattern as CLI - replace all `api.apiKeys.validate` calls: + +Functions to update in `mcp.ts`: + +- `ask` (line ~68) +- `listResources` (line ~199) +- `addResource` (line ~256) +- `sync` (line ~331) + +Also remove the `api.apiKeys.touchLastUsed` calls since usage is now tracked in the validation action: + +**Remove these lines:** + +```typescript +// Remove - no longer needed +await ctx.runMutation(api.apiKeys.touchLastUsed, { keyId: validation.keyId }); +``` + +--- + +## Phase 3: Update UI to Use Clerk's API + +The app uses `@clerk/clerk-js` directly. We'll update the existing UI to use Clerk's Frontend API for API key management instead of Convex mutations. + +### 3.1 Update Settings Page + +**File:** `src/routes/app/settings/+page.svelte` + +**Changes to make:** + +1. **Remove Convex API key imports and queries:** + +```typescript +// REMOVE these: +const apiKeysQuery = $derived(instanceId ? useQuery(api.apiKeys.list, {}) : null); +const apiKeys = $derived(apiKeysQuery?.data ?? 
[]); +``` + +2. **Add Clerk API key state and functions:** + +```svelte + +``` + +3. **Update the template to use `clerkApiKeys` instead of `apiKeys`:** + +Replace references to `apiKeys` with `clerkApiKeys` and `apiKeysQuery?.isLoading` with `isLoadingKeys`. + +The existing HTML structure can remain largely the same - just update the data source: + +```svelte + +{#each apiKeys as key} + + +{#each clerkApiKeys as key} +``` + +```svelte + +{#if apiKeysQuery?.isLoading} + + +{#if isLoadingKeys} +``` + +4. **Update key display (Clerk doesn't store keyPrefix):** + +Since Clerk doesn't expose a key prefix, update the display: + +```svelte + +{key.keyPrefix}... + + +{key.id} +``` + +### 3.2 Update Questions Settings Page + +**File:** `src/routes/app/settings/questions/+page.svelte` + +Apply the same changes as the main settings page: + +- Remove Convex `apiKeysQuery` and related state +- Add Clerk API key loading/create/revoke functions +- Update template to use `clerkApiKeys` + +### 3.3 Type Definitions + +Clerk's `APIKeyResource` type (from `@clerk/types`) has these relevant fields: + +- `id: string` - The API key ID (e.g., "ak_xxx") +- `name: string | null` - User-provided name +- `subject: string` - The user/org ID this key belongs to +- `createdAt: number` - Creation timestamp +- `secret: string` - **Only available on create response!** + +You may want to add a type for the UI: + +```typescript +interface ClerkApiKey { + id: string; + name: string | null; + createdAt: number; +} +``` + +### 3.4 Display Usage Stats (Optional Enhancement) + +To show usage stats (last used, usage count) alongside Clerk's API keys, query the `apiKeyUsage` table: + +```svelte + + + +{#each clerkApiKeys as key} + {@const usage = usageByKeyId().get(key.id)} +
+
+

{key.name ?? 'Unnamed key'}

+

+ Created {formatDate(key.createdAt)} + {#if usage?.lastUsedAt} + | Last used {formatDate(usage.lastUsedAt)} + | {usage.usageCount} uses + {:else} + | Never used + {/if} +

+
+ +
+{/each} +``` + +**Note:** This requires adding a public query to `clerkApiKeys.ts`: + +```typescript +/** + * List usage stats for the authenticated user's API keys. + * Public query - requires auth. + */ +export const listUsageForUser = query({ + args: {}, + handler: async (ctx) => { + const identity = await ctx.auth.getUserIdentity(); + if (!identity) return []; + + const instance = await ctx.db + .query('instances') + .withIndex('by_clerk_id', (q) => q.eq('clerkId', identity.subject)) + .first(); + + if (!instance) return []; + + return await ctx.db + .query('apiKeyUsage') + .withIndex('by_instance', (q) => q.eq('instanceId', instance._id)) + .collect(); + } +}); +``` + +--- + +## Phase 4: Cleanup + +### 4.1 Remove Old API Keys File + +**Delete:** `src/convex/apiKeys.ts` + +### 4.2 Update Schema + +**File:** `src/convex/schema.ts` + +Remove the old `apiKeys` table definition (lines 98-109): + +```typescript +// DELETE THIS: +apiKeys: defineTable({ + instanceId: v.id('instances'), + name: v.string(), + keyHash: v.string(), + keyPrefix: v.string(), + createdAt: v.number(), + lastUsedAt: v.optional(v.number()), + revokedAt: v.optional(v.number()), + usageCount: v.optional(v.number()) +}) + .index('by_instance', ['instanceId']) + .index('by_key_hash', ['keyHash']), +``` + +### 4.3 Remove Auth Helper + +**File:** `src/convex/authHelpers.ts` + +Remove `requireApiKeyOwnership` function (lines 120-147) as it's no longer needed. + +### 4.4 Clean Up Imports + +Search for and remove any imports of the old `apiKeys` module: + +```typescript +// Remove these imports wherever they appear: +import { api } from './_generated/api'; +// Specifically: api.apiKeys.* +``` + +### 4.5 Data Migration + +Create a migration to clean up old API key data: + +**File:** `src/convex/migrations.ts` (add to existing or create) + +```typescript +import { internalMutation } from './_generated/server'; + +/** + * Migration: Remove old apiKeys data after Clerk migration. 
+ * Run this after confirming the new system works. + */ +export const cleanupOldApiKeys = internalMutation({ + handler: async (ctx) => { + const oldKeys = await ctx.db.query('apiKeys').collect(); + let deleted = 0; + for (const key of oldKeys) { + await ctx.db.delete(key._id); + deleted++; + } + return { deleted }; + } +}); +``` + +--- + +## Environment Variables + +Ensure these are set in your Convex environment: + +```bash +# Already should exist +CLERK_SECRET_KEY=sk_live_xxx # or sk_test_xxx for development + +# Verify in Convex dashboard under Settings > Environment Variables +``` + +--- + +## Testing Checklist + +1. **Create API key in Clerk UI** + - Go to settings page + - Create a new API key + - Copy the secret (only shown once!) + +2. **Test CLI authentication** + + ```bash + btca remote link --key + btca remote status + ``` + +3. **Test MCP authentication** + - Configure MCP client with new API key + - Call `listResources` tool + - Call `ask` tool + +4. **Verify usage tracking** + - Check Convex dashboard for `apiKeyUsage` table entries + - Verify `usageCount` increments on each use + +5. **Test key revocation** + - Revoke key in Clerk UI + - Verify CLI/MCP calls fail with "Invalid API key" + +--- + +## Rollback Plan + +If issues arise: + +1. Keep the old `apiKeys.ts` file until migration is confirmed working +2. The old table can remain in schema during transition +3. 
To rollback: revert imports in `cli.ts` and `mcp.ts` to use `api.apiKeys.validate` + +--- + +## File Change Summary + +| File | Action | +| ------------------------------------------------ | ----------------------------------------------------- | +| `src/convex/schema.ts` | Add `apiKeyUsage` table, later remove `apiKeys` table | +| `src/convex/clerkApiKeys.ts` | **CREATE** - New Clerk verification helper | +| `src/convex/instances/queries.ts` | Add `getByClerkIdInternal` query | +| `src/convex/cli.ts` | Update validation calls | +| `src/convex/mcp.ts` | Update validation calls, remove `touchLastUsed` calls | +| `src/convex/apiKeys.ts` | **DELETE** after migration | +| `src/convex/authHelpers.ts` | Remove `requireApiKeyOwnership` | +| `src/routes/app/settings/+page.svelte` | Replace custom UI with Clerk API calls | +| `src/routes/app/settings/questions/+page.svelte` | Replace custom UI with Clerk API calls | +| `package.json` | Add `@clerk/backend` dependency | diff --git a/apps/web/package.json b/apps/web/package.json index 87cbb049..b121eabb 100644 --- a/apps/web/package.json +++ b/apps/web/package.json @@ -36,8 +36,11 @@ "dependencies": { "@ai-sdk/openai-compatible": "^2.0.13", "@btca/shared": "workspace:*", + "btca-sandbox": "workspace:*", + "@clerk/backend": "^2.29.5", "@clerk/clerk-js": "^5.120.0", "@clerk/types": "^4.101.11", + "@convex-dev/migrations": "^0.3.1", "@daytonaio/sdk": "^0.130.0", "@lucide/svelte": "^0.562.0", "@shikijs/langs": "^3.20.0", diff --git a/apps/web/src/convex/_generated/api.d.ts b/apps/web/src/convex/_generated/api.d.ts index 5bac56ee..ab7ffc1d 100644 --- a/apps/web/src/convex/_generated/api.d.ts +++ b/apps/web/src/convex/_generated/api.d.ts @@ -11,15 +11,22 @@ import type * as analytics from "../analytics.js"; import type * as analyticsEvents from "../analyticsEvents.js"; import type * as apiHelpers from "../apiHelpers.js"; -import type * as apiKeys from "../apiKeys.js"; import type * as authHelpers from "../authHelpers.js"; +import 
type * as clerkApiKeys from "../clerkApiKeys.js"; +import type * as clerkApiKeysQueries from "../clerkApiKeysQueries.js"; +import type * as cli from "../cli.js"; +import type * as cliInternal from "../cliInternal.js"; import type * as crons from "../crons.js"; import type * as http from "../http.js"; import type * as instances_actions from "../instances/actions.js"; import type * as instances_mutations from "../instances/mutations.js"; import type * as instances_queries from "../instances/queries.js"; import type * as mcp from "../mcp.js"; +import type * as mcpInternal from "../mcpInternal.js"; +import type * as mcpQuestions from "../mcpQuestions.js"; import type * as messages from "../messages.js"; +import type * as migrations from "../migrations.js"; +import type * as projects from "../projects.js"; import type * as resources from "../resources.js"; import type * as scheduled_queries from "../scheduled/queries.js"; import type * as scheduled_updates from "../scheduled/updates.js"; @@ -41,15 +48,22 @@ declare const fullApi: ApiFromModules<{ analytics: typeof analytics; analyticsEvents: typeof analyticsEvents; apiHelpers: typeof apiHelpers; - apiKeys: typeof apiKeys; authHelpers: typeof authHelpers; + clerkApiKeys: typeof clerkApiKeys; + clerkApiKeysQueries: typeof clerkApiKeysQueries; + cli: typeof cli; + cliInternal: typeof cliInternal; crons: typeof crons; http: typeof http; "instances/actions": typeof instances_actions; "instances/mutations": typeof instances_mutations; "instances/queries": typeof instances_queries; mcp: typeof mcp; + mcpInternal: typeof mcpInternal; + mcpQuestions: typeof mcpQuestions; messages: typeof messages; + migrations: typeof migrations; + projects: typeof projects; resources: typeof resources; "scheduled/queries": typeof scheduled_queries; "scheduled/updates": typeof scheduled_updates; @@ -88,4 +102,91 @@ export declare const internal: FilterApi< FunctionReference >; -export declare const components: {}; +export declare const 
components: { + migrations: { + lib: { + cancel: FunctionReference< + "mutation", + "internal", + { name: string }, + { + batchSize?: number; + cursor?: string | null; + error?: string; + isDone: boolean; + latestEnd?: number; + latestStart: number; + name: string; + next?: Array; + processed: number; + state: "inProgress" | "success" | "failed" | "canceled" | "unknown"; + } + >; + cancelAll: FunctionReference< + "mutation", + "internal", + { sinceTs?: number }, + Array<{ + batchSize?: number; + cursor?: string | null; + error?: string; + isDone: boolean; + latestEnd?: number; + latestStart: number; + name: string; + next?: Array; + processed: number; + state: "inProgress" | "success" | "failed" | "canceled" | "unknown"; + }> + >; + clearAll: FunctionReference< + "mutation", + "internal", + { before?: number }, + null + >; + getStatus: FunctionReference< + "query", + "internal", + { limit?: number; names?: Array }, + Array<{ + batchSize?: number; + cursor?: string | null; + error?: string; + isDone: boolean; + latestEnd?: number; + latestStart: number; + name: string; + next?: Array; + processed: number; + state: "inProgress" | "success" | "failed" | "canceled" | "unknown"; + }> + >; + migrate: FunctionReference< + "mutation", + "internal", + { + batchSize?: number; + cursor?: string | null; + dryRun: boolean; + fnHandle: string; + name: string; + next?: Array<{ fnHandle: string; name: string }>; + oneBatchOnly?: boolean; + }, + { + batchSize?: number; + cursor?: string | null; + error?: string; + isDone: boolean; + latestEnd?: number; + latestStart: number; + name: string; + next?: Array; + processed: number; + state: "inProgress" | "success" | "failed" | "canceled" | "unknown"; + } + >; + }; + }; +}; diff --git a/apps/web/src/convex/analyticsEvents.ts b/apps/web/src/convex/analyticsEvents.ts index 652a9c56..c858cb33 100644 --- a/apps/web/src/convex/analyticsEvents.ts +++ b/apps/web/src/convex/analyticsEvents.ts @@ -27,6 +27,9 @@ export const AnalyticsEvents = { 
THREAD_DELETED: 'thread_deleted', THREAD_CLEARED: 'thread_cleared', + PROJECT_CREATED: 'project_created', + PROJECT_DELETED: 'project_deleted', + RESOURCE_ADDED: 'resource_added', RESOURCE_REMOVED: 'resource_removed', diff --git a/apps/web/src/convex/apiKeys.ts b/apps/web/src/convex/apiKeys.ts deleted file mode 100644 index 9dc84427..00000000 --- a/apps/web/src/convex/apiKeys.ts +++ /dev/null @@ -1,172 +0,0 @@ -import { mutation, query } from './_generated/server'; -import { v } from 'convex/values'; - -import { internal } from './_generated/api'; -import { AnalyticsEvents } from './analyticsEvents'; -import { getAuthenticatedInstance, requireApiKeyOwnership } from './authHelpers'; - -/** - * List API keys for the authenticated user's instance - */ -export const list = query({ - args: {}, - handler: async (ctx) => { - const instance = await getAuthenticatedInstance(ctx); - - const keys = await ctx.db - .query('apiKeys') - .withIndex('by_instance', (q) => q.eq('instanceId', instance._id)) - .collect(); - - return keys.map((k) => ({ - _id: k._id, - name: k.name, - keyPrefix: k.keyPrefix, - createdAt: k.createdAt, - lastUsedAt: k.lastUsedAt, - revokedAt: k.revokedAt, - usageCount: k.usageCount ?? 
0 - })); - } -}); - -/** - * Create an API key for the authenticated user's instance - */ -export const create = mutation({ - args: { - name: v.string() - }, - handler: async (ctx, args) => { - const instance = await getAuthenticatedInstance(ctx); - - const key = generateApiKey(); - const keyHash = await hashApiKey(key); - const keyPrefix = key.slice(0, 8); - - const id = await ctx.db.insert('apiKeys', { - instanceId: instance._id, - name: args.name, - keyHash, - keyPrefix, - createdAt: Date.now() - }); - - await ctx.scheduler.runAfter(0, internal.analytics.trackEvent, { - distinctId: instance.clerkId, - event: AnalyticsEvents.API_KEY_CREATED, - properties: { - instanceId: instance._id, - keyId: id, - keyName: args.name - } - }); - - return { id, key }; - } -}); - -function generateApiKey(): string { - const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'; - let result = 'btca_'; - for (let i = 0; i < 32; i++) { - result += chars.charAt(Math.floor(Math.random() * chars.length)); - } - return result; -} - -/** - * Revoke an API key owned by the authenticated user - */ -export const revoke = mutation({ - args: { keyId: v.id('apiKeys') }, - handler: async (ctx, args) => { - const { apiKey, instance } = await requireApiKeyOwnership(ctx, args.keyId); - - await ctx.db.patch(args.keyId, { - revokedAt: Date.now() - }); - - await ctx.scheduler.runAfter(0, internal.analytics.trackEvent, { - distinctId: instance.clerkId, - event: AnalyticsEvents.API_KEY_REVOKED, - properties: { - instanceId: apiKey.instanceId, - keyId: args.keyId - } - }); - } -}); - -/** - * Validate an API key (internal use - no auth required as this validates the key itself) - */ -export const validate = query({ - args: { apiKey: v.string() }, - handler: async (ctx, args) => { - const keyHash = await hashApiKey(args.apiKey); - - const key = await ctx.db - .query('apiKeys') - .withIndex('by_key_hash', (q) => q.eq('keyHash', keyHash)) - .first(); - - if (!key) { - return { valid: 
false as const, error: 'Invalid API key' }; - } - - if (key.revokedAt) { - return { valid: false as const, error: 'API key has been revoked' }; - } - - const instance = await ctx.db.get(key.instanceId); - if (!instance) { - return { valid: false as const, error: 'User not found' }; - } - - return { - valid: true as const, - keyId: key._id, - userId: key.instanceId, - clerkId: instance.clerkId - }; - } -}); - -/** - * Touch last used timestamp for an API key (internal use for tracking) - */ -export const touchLastUsed = mutation({ - args: { keyId: v.id('apiKeys') }, - handler: async (ctx, args) => { - const apiKey = await ctx.db.get(args.keyId); - const instance = apiKey ? await ctx.db.get(apiKey.instanceId) : null; - - const currentCount = apiKey?.usageCount ?? 0; - - await ctx.db.patch(args.keyId, { - lastUsedAt: Date.now(), - usageCount: currentCount + 1 - }); - - if (instance && apiKey) { - await ctx.scheduler.runAfter(0, internal.analytics.trackEvent, { - distinctId: instance.clerkId, - event: AnalyticsEvents.API_KEY_USED, - properties: { - instanceId: apiKey.instanceId, - keyId: args.keyId, - usageCount: currentCount + 1 - } - }); - } - } -}); - -async function hashApiKey(apiKey: string): Promise { - const encoder = new TextEncoder(); - const data = encoder.encode(apiKey); - const hashBuffer = await crypto.subtle.digest('SHA-256', data); - const hashArray = Array.from(new Uint8Array(hashBuffer)); - return hashArray.map((b) => b.toString(16).padStart(2, '0')).join(''); -} diff --git a/apps/web/src/convex/authHelpers.ts b/apps/web/src/convex/authHelpers.ts index 02564ffa..9f65df04 100644 --- a/apps/web/src/convex/authHelpers.ts +++ b/apps/web/src/convex/authHelpers.ts @@ -116,36 +116,6 @@ export async function requireMessageOwnership( return { message, thread, instance }; } -/** - * Validates that the authenticated user owns the API key (via its instance). - * Returns the API key and instance if ownership is confirmed. 
- */ -export async function requireApiKeyOwnership( - ctx: DbCtx, - keyId: Id<'apiKeys'> -): Promise<{ apiKey: Doc<'apiKeys'>; instance: Doc<'instances'> }> { - const identity = await ctx.auth.getUserIdentity(); - if (!identity) { - throw new Error('Unauthorized: Authentication required'); - } - - const apiKey = await ctx.db.get(keyId); - if (!apiKey) { - throw new Error('API key not found'); - } - - const instance = await ctx.db.get(apiKey.instanceId); - if (!instance) { - throw new Error('Instance not found'); - } - - if (instance.clerkId !== identity.subject) { - throw new Error('Unauthorized: Access denied'); - } - - return { apiKey, instance }; -} - /** * Validates that the authenticated user owns the user resource (via its instance). * Returns the resource and instance if ownership is confirmed. diff --git a/apps/web/src/convex/clerkApiKeys.ts b/apps/web/src/convex/clerkApiKeys.ts new file mode 100644 index 00000000..58cbb7ee --- /dev/null +++ b/apps/web/src/convex/clerkApiKeys.ts @@ -0,0 +1,126 @@ +'use node'; + +import { createClerkClient } from '@clerk/backend'; +import type { FunctionReference } from 'convex/server'; +import { v } from 'convex/values'; + +import { internal } from './_generated/api'; +import type { Id } from './_generated/dataModel'; +import { action } from './_generated/server'; +import { AnalyticsEvents } from './analyticsEvents'; +import { instances } from './apiHelpers'; + +// Type for internal functions (will be auto-generated after first deploy) +type InternalClerkApiKeysQueries = { + touchUsage: FunctionReference< + 'mutation', + 'internal', + { + clerkApiKeyId: string; + clerkUserId: string; + instanceId: Id<'instances'>; + name?: string; + }, + void + >; +}; + +// Access internal functions for the queries module +const clerkApiKeysQueriesInternal = internal as unknown as { + clerkApiKeysQueries: InternalClerkApiKeysQueries; + analytics: { + trackEvent: FunctionReference< + 'action', + 'internal', + { distinctId: string; event: 
string; properties: Record }, + void + >; + }; +}; + +const getClerkClient = () => { + const secretKey = process.env.CLERK_SECRET_KEY; + if (!secretKey) { + throw new Error('CLERK_SECRET_KEY environment variable is not set'); + } + return createClerkClient({ secretKey }); +}; + +// ───────────────────────────────────────────────────────────────────────────── +// Types +// ───────────────────────────────────────────────────────────────────────────── + +export type ApiKeyValidationResult = + | { + valid: true; + clerkApiKeyId: string; + clerkUserId: string; + instanceId: Id<'instances'>; + } + | { + valid: false; + error: string; + }; + +// ───────────────────────────────────────────────────────────────────────────── +// Actions (public API) +// ───────────────────────────────────────────────────────────────────────────── + +/** + * Validate an API key using Clerk and return the associated instance. + * This is the main entry point for API key validation. + */ +export const validate = action({ + args: { apiKey: v.string() }, + handler: async (ctx, args): Promise => { + const { apiKey } = args; + + // Verify with Clerk + let clerkResult: { id: string; subject: string; name: string | null }; + try { + const clerkClient = getClerkClient(); + clerkResult = await clerkClient.apiKeys.verify(apiKey); + } catch (error) { + // Clerk throws on invalid/revoked/expired keys + const message = error instanceof Error ? 
error.message : 'Invalid API key'; + return { valid: false, error: message }; + } + + const clerkApiKeyId = clerkResult.id; + const clerkUserId = clerkResult.subject; + + // Get instance by Clerk user ID (using internal query since we don't have auth context) + const instance = await ctx.runQuery(instances.internalQueries.getByClerkIdInternal, { + clerkId: clerkUserId + }); + + if (!instance) { + return { valid: false, error: 'No instance found for this user' }; + } + + // Track usage (call the mutation in clerkApiKeysQueries module) + await ctx.runMutation(clerkApiKeysQueriesInternal.clerkApiKeysQueries.touchUsage, { + clerkApiKeyId, + clerkUserId, + instanceId: instance._id, + name: clerkResult.name ?? undefined + }); + + // Track analytics asynchronously + await ctx.scheduler.runAfter(0, clerkApiKeysQueriesInternal.analytics.trackEvent, { + distinctId: clerkUserId, + event: AnalyticsEvents.API_KEY_USED, + properties: { + instanceId: instance._id, + apiKeyId: clerkApiKeyId + } + }); + + return { + valid: true, + clerkApiKeyId, + clerkUserId, + instanceId: instance._id + }; + } +}); diff --git a/apps/web/src/convex/clerkApiKeysQueries.ts b/apps/web/src/convex/clerkApiKeysQueries.ts new file mode 100644 index 00000000..e43d16c4 --- /dev/null +++ b/apps/web/src/convex/clerkApiKeysQueries.ts @@ -0,0 +1,92 @@ +import { v } from 'convex/values'; + +import { internalMutation, internalQuery, query } from './_generated/server'; + +// ───────────────────────────────────────────────────────────────────────────── +// Internal Mutations +// ───────────────────────────────────────────────────────────────────────────── + +/** + * Update usage tracking for an API key. + * Creates a new record if this is the first use. 
+ */ +export const touchUsage = internalMutation({ + args: { + clerkApiKeyId: v.string(), + clerkUserId: v.string(), + instanceId: v.id('instances'), + name: v.optional(v.string()) + }, + handler: async (ctx, args) => { + const { clerkApiKeyId, clerkUserId, instanceId, name } = args; + + const existing = await ctx.db + .query('apiKeyUsage') + .withIndex('by_clerk_api_key_id', (q) => q.eq('clerkApiKeyId', clerkApiKeyId)) + .first(); + + if (existing) { + await ctx.db.patch(existing._id, { + lastUsedAt: Date.now(), + usageCount: existing.usageCount + 1, + ...(name && { name }) + }); + } else { + await ctx.db.insert('apiKeyUsage', { + clerkApiKeyId, + clerkUserId, + instanceId, + name, + lastUsedAt: Date.now(), + usageCount: 1, + createdAt: Date.now() + }); + } + } +}); + +// ───────────────────────────────────────────────────────────────────────────── +// Internal Queries +// ───────────────────────────────────────────────────────────────────────────── + +/** + * List usage stats for an instance's API keys. + * Used by the UI to show usage information alongside Clerk's key list. + */ +export const listUsageByInstance = internalQuery({ + args: { instanceId: v.id('instances') }, + handler: async (ctx, args) => { + return await ctx.db + .query('apiKeyUsage') + .withIndex('by_instance', (q) => q.eq('instanceId', args.instanceId)) + .collect(); + } +}); + +// ───────────────────────────────────────────────────────────────────────────── +// Public Queries +// ───────────────────────────────────────────────────────────────────────────── + +/** + * List usage stats for the authenticated user's API keys. + * Public query - requires auth. 
+ */ +export const listUsageForUser = query({ + args: {}, + handler: async (ctx) => { + const identity = await ctx.auth.getUserIdentity(); + if (!identity) return []; + + const instance = await ctx.db + .query('instances') + .withIndex('by_clerk_id', (q) => q.eq('clerkId', identity.subject)) + .first(); + + if (!instance) return []; + + return await ctx.db + .query('apiKeyUsage') + .withIndex('by_instance', (q) => q.eq('instanceId', instance._id)) + .collect(); + } +}); diff --git a/apps/web/src/convex/cli.ts b/apps/web/src/convex/cli.ts new file mode 100644 index 00000000..983e4017 --- /dev/null +++ b/apps/web/src/convex/cli.ts @@ -0,0 +1,335 @@ +'use node'; + +import { v } from 'convex/values'; + +import { api, internal } from './_generated/api'; +import type { Doc, Id } from './_generated/dataModel'; +import { action } from './_generated/server'; +import { instances } from './apiHelpers'; +import type { ApiKeyValidationResult } from './clerkApiKeys'; + +const instanceActions = instances.actions; + +// ───────────────────────────────────────────────────────────────────────────── +// Actions (public endpoints) +// Internal queries are in cliInternal.ts (can't use Node.js runtime) +// ───────────────────────────────────────────────────────────────────────────── + +type StatusResult = + | { ok: false; error: string } + | { + ok: true; + instance: { + _id: string; + state: string; + serverUrl: string | undefined; + btcaVersion: string | undefined; + subscriptionPlan: string | undefined; + }; + project?: { + _id: string; + name: string; + model: string | undefined; + isDefault: boolean; + createdAt: number; + }; + }; + +/** + * Get instance status for CLI + */ +export const getInstanceStatus = action({ + args: { + apiKey: v.string(), + project: v.optional(v.string()) + }, + handler: async (ctx, args): Promise => { + const { apiKey, project: projectName } = args; + + // Validate API key with Clerk + const validation = (await ctx.runAction(api.clerkApiKeys.validate, { + 
apiKey + })) as ApiKeyValidationResult; + if (!validation.valid) { + return { ok: false, error: validation.error }; + } + + const instanceId = validation.instanceId; + + // Get instance + const instance = await ctx.runQuery(instances.internalQueries.getInternal, { id: instanceId }); + if (!instance) { + return { ok: false, error: 'Instance not found' }; + } + + // Get project if specified + let project: Doc<'projects'> | null = null; + if (projectName) { + project = await ctx.runQuery(internal.projects.getByInstanceAndName, { + instanceId, + name: projectName + }); + } + + return { + ok: true, + instance: { + _id: instance._id as string, + state: instance.state, + serverUrl: instance.serverUrl, + btcaVersion: instance.btcaVersion, + subscriptionPlan: instance.subscriptionPlan + }, + project: project + ? { + _id: project._id as string, + name: project.name, + model: project.model, + isDefault: project.isDefault, + createdAt: project.createdAt + } + : undefined + }; + } +}); + +type WakeResult = { ok: false; error: string } | { ok: true; serverUrl: string }; + +/** + * Wake the sandbox for CLI + */ +export const wakeInstance = action({ + args: { + apiKey: v.string() + }, + handler: async (ctx, args): Promise => { + const { apiKey } = args; + + // Validate API key with Clerk + const validation = (await ctx.runAction(api.clerkApiKeys.validate, { + apiKey + })) as ApiKeyValidationResult; + if (!validation.valid) { + return { ok: false, error: validation.error }; + } + + const instanceId = validation.instanceId; + + // Get instance + const instance = await ctx.runQuery(instances.internalQueries.getInternal, { id: instanceId }); + if (!instance) { + return { ok: false, error: 'Instance not found' }; + } + + if (!instance.sandboxId) { + return { ok: false, error: 'Instance does not have a sandbox' }; + } + + // Wake it if not running + if (instance.state !== 'running' || !instance.serverUrl) { + try { + const result = await ctx.runAction(instanceActions.wake, { instanceId 
}); + return { ok: true, serverUrl: result.serverUrl }; + } catch (err) { + return { + ok: false, + error: err instanceof Error ? err.message : 'Failed to wake sandbox' + }; + } + } + + return { ok: true, serverUrl: instance.serverUrl }; + } +}); + +type ProjectsResult = + | { ok: false; error: string } + | { + ok: true; + projects: Array<{ + _id: string; + name: string; + model: string | undefined; + isDefault: boolean; + createdAt: number; + }>; + }; + +/** + * List projects for CLI + */ +export const listProjects = action({ + args: { + apiKey: v.string() + }, + handler: async (ctx, args): Promise => { + const { apiKey } = args; + + // Validate API key with Clerk + const validation = (await ctx.runAction(api.clerkApiKeys.validate, { + apiKey + })) as ApiKeyValidationResult; + if (!validation.valid) { + return { ok: false, error: validation.error }; + } + + const instanceId = validation.instanceId; + + // Get projects + const projects = await ctx.runQuery(internal.cliInternal.listProjectsInternal, { instanceId }); + + return { ok: true, projects }; + } +}); + +type ThreadsResult = + | { ok: false; error: string } + | { + ok: true; + threads: Array<{ + _id: string; + title: string | undefined; + createdAt: number; + lastActivityAt: number; + }>; + }; + +/** + * List threads for CLI + */ +export const listThreads = action({ + args: { + apiKey: v.string(), + project: v.optional(v.string()) + }, + handler: async (ctx, args): Promise => { + const { apiKey, project: projectName } = args; + + // Validate API key with Clerk + const validation = (await ctx.runAction(api.clerkApiKeys.validate, { + apiKey + })) as ApiKeyValidationResult; + if (!validation.valid) { + return { ok: false, error: validation.error }; + } + + const instanceId = validation.instanceId; + + // Get threads + const threads = await ctx.runQuery(internal.cliInternal.listThreadsInternal, { + instanceId, + projectName + }); + + return { ok: true, threads }; + } +}); + +type ThreadResult = + | { ok: false; 
error: string } + | { + ok: true; + thread: { + _id: string; + title: string | undefined; + createdAt: number; + lastActivityAt: number; + }; + messages: Array<{ + _id: string; + threadId: string; + role: string; + content: string; + resources: string[] | undefined; + createdAt: number; + }>; + }; + +/** + * Get thread with messages for CLI + */ +export const getThread = action({ + args: { + apiKey: v.string(), + threadId: v.string() + }, + handler: async (ctx, args): Promise => { + const { apiKey, threadId } = args; + + // Validate API key with Clerk + const validation = (await ctx.runAction(api.clerkApiKeys.validate, { + apiKey + })) as ApiKeyValidationResult; + if (!validation.valid) { + return { ok: false, error: validation.error }; + } + + const instanceId = validation.instanceId; + + // Get thread + const result = await ctx.runQuery(internal.cliInternal.getThreadInternal, { + instanceId, + threadId + }); + + if (!result) { + return { ok: false, error: 'Thread not found' }; + } + + return { ok: true, thread: result.thread, messages: result.messages }; + } +}); + +type QuestionsResult = + | { ok: false; error: string } + | { + ok: true; + questions: Array<{ + _id: string; + projectId: string; + question: string; + resources: string[]; + answer: string; + createdAt: number; + }>; + }; + +/** + * List MCP questions for CLI + */ +export const listQuestions = action({ + args: { + apiKey: v.string(), + project: v.string() + }, + handler: async (ctx, args): Promise => { + const { apiKey, project: projectName } = args; + + // Validate API key with Clerk + const validation = (await ctx.runAction(api.clerkApiKeys.validate, { + apiKey + })) as ApiKeyValidationResult; + if (!validation.valid) { + return { ok: false, error: validation.error }; + } + + const instanceId = validation.instanceId; + + // Get project + const project = await ctx.runQuery(internal.projects.getByInstanceAndName, { + instanceId, + name: projectName + }); + + if (!project) { + return { ok: false, 
error: `Project "${projectName}" not found` }; + } + + // Get questions + const questions = await ctx.runQuery(internal.cliInternal.listQuestionsInternal, { + projectId: project._id + }); + + return { ok: true, questions }; + } +}); diff --git a/apps/web/src/convex/cliInternal.ts b/apps/web/src/convex/cliInternal.ts new file mode 100644 index 00000000..b58a139f --- /dev/null +++ b/apps/web/src/convex/cliInternal.ts @@ -0,0 +1,145 @@ +import { v } from 'convex/values'; + +import type { Id } from './_generated/dataModel'; +import { internalQuery } from './_generated/server'; + +// ───────────────────────────────────────────────────────────────────────────── +// Internal queries for CLI (must be in non-Node.js runtime) +// ───────────────────────────────────────────────────────────────────────────── + +/** + * List projects for an instance (internal) + */ +export const listProjectsInternal = internalQuery({ + args: { + instanceId: v.id('instances') + }, + handler: async (ctx, args) => { + const projects = await ctx.db + .query('projects') + .withIndex('by_instance', (q) => q.eq('instanceId', args.instanceId)) + .collect(); + + return projects + .map((p) => ({ + _id: p._id as string, + name: p.name, + model: p.model, + isDefault: p.isDefault, + createdAt: p.createdAt + })) + .sort((a, b) => { + if (a.isDefault && !b.isDefault) return -1; + if (!a.isDefault && b.isDefault) return 1; + return b.createdAt - a.createdAt; + }); + } +}); + +/** + * List threads for an instance (internal) + */ +export const listThreadsInternal = internalQuery({ + args: { + instanceId: v.id('instances'), + projectName: v.optional(v.string()) + }, + handler: async (ctx, args) => { + let threads = await ctx.db + .query('threads') + .withIndex('by_instance', (q) => q.eq('instanceId', args.instanceId)) + .collect(); + + // Filter by project if specified + if (args.projectName) { + const project = await ctx.db + .query('projects') + .withIndex('by_instance_and_name', (q) => + q.eq('instanceId', 
args.instanceId).eq('name', args.projectName!) + ) + .first(); + + if (project) { + threads = threads.filter((t) => t.projectId === project._id); + } else { + return []; + } + } + + return threads + .map((t) => ({ + _id: t._id as string, + title: t.title, + createdAt: t.createdAt, + lastActivityAt: t.lastActivityAt + })) + .sort((a, b) => b.lastActivityAt - a.lastActivityAt); + } +}); + +/** + * Get thread with messages (internal) + */ +export const getThreadInternal = internalQuery({ + args: { + instanceId: v.id('instances'), + threadId: v.string() + }, + handler: async (ctx, args) => { + // Get thread + const thread = await ctx.db.get(args.threadId as Id<'threads'>); + if (!thread || thread.instanceId !== args.instanceId) { + return null; + } + + const messages = await ctx.db + .query('messages') + .withIndex('by_thread', (q) => q.eq('threadId', thread._id)) + .collect(); + + return { + thread: { + _id: thread._id as string, + title: thread.title, + createdAt: thread.createdAt, + lastActivityAt: thread.lastActivityAt + }, + messages: messages + .map((m) => ({ + _id: m._id as string, + threadId: m.threadId as string, + role: m.role, + content: typeof m.content === 'string' ? 
m.content : JSON.stringify(m.content), + resources: m.resources, + createdAt: m.createdAt + })) + .sort((a, b) => a.createdAt - b.createdAt) + }; + } +}); + +/** + * List MCP questions for a project (internal) + */ +export const listQuestionsInternal = internalQuery({ + args: { + projectId: v.id('projects') + }, + handler: async (ctx, args) => { + const questions = await ctx.db + .query('mcpQuestions') + .withIndex('by_project', (q) => q.eq('projectId', args.projectId)) + .collect(); + + return questions + .map((q) => ({ + _id: q._id as string, + projectId: q.projectId as string, + question: q.question, + resources: q.resources, + answer: q.answer, + createdAt: q.createdAt + })) + .sort((a, b) => b.createdAt - a.createdAt); + } +}); diff --git a/apps/web/src/convex/convex.config.ts b/apps/web/src/convex/convex.config.ts new file mode 100644 index 00000000..eecc7461 --- /dev/null +++ b/apps/web/src/convex/convex.config.ts @@ -0,0 +1,7 @@ +import { defineApp } from 'convex/server'; +import migrations from '@convex-dev/migrations/convex.config.js'; + +const app = defineApp(); +app.use(migrations); + +export default app; diff --git a/apps/web/src/convex/instances/actions.ts b/apps/web/src/convex/instances/actions.ts index 69d3929d..9dc6441e 100644 --- a/apps/web/src/convex/instances/actions.ts +++ b/apps/web/src/convex/instances/actions.ts @@ -1,6 +1,7 @@ 'use node'; import { Daytona, type Sandbox } from '@daytonaio/sdk'; +import { BTCA_SNAPSHOT_NAME } from 'btca-sandbox/shared'; import { v } from 'convex/values'; import { api, internal } from '../_generated/api'; @@ -11,8 +12,6 @@ import { instances } from '../apiHelpers'; const instanceQueries = instances.queries; const instanceMutations = instances.mutations; - -const BTCA_SNAPSHOT_NAME = 'btca-app-sandbox'; const BTCA_SERVER_PORT = 3000; const SANDBOX_IDLE_MINUTES = 2; const DEFAULT_MODEL = 'claude-haiku-4-5'; @@ -401,15 +400,57 @@ async function requireAuthenticatedInstance(ctx: ActionCtx): Promise, + instance: 
Doc<'instances'> +): Promise<{ sandbox: Sandbox; serverUrl: string }> { + requireEnv('OPENCODE_API_KEY'); + + const resources = await getResourceConfigs(ctx, instanceId); + const daytona = getDaytona(); + const sandbox = await daytona.create({ + snapshot: BTCA_SNAPSHOT_NAME, + autoStopInterval: SANDBOX_IDLE_MINUTES, + envVars: { + NODE_ENV: 'production', + OPENCODE_API_KEY: requireEnv('OPENCODE_API_KEY') + }, + public: true + }); + + await uploadBtcaConfig(sandbox, resources); + const serverUrl = await startBtcaServer(sandbox); + + const versions = await getInstalledVersions(sandbox); + + await ctx.runMutation(instanceMutations.setProvisioned, { + instanceId, + sandboxId: sandbox.id, + btcaVersion: versions.btcaVersion, + opencodeVersion: versions.opencodeVersion + }); + + await ctx.scheduler.runAfter(0, internal.analytics.trackEvent, { + distinctId: instance.clerkId, + event: AnalyticsEvents.SANDBOX_PROVISIONED, + properties: { + instanceId, + sandboxId: sandbox.id, + btcaVersion: versions.btcaVersion, + opencodeVersion: versions.opencodeVersion, + createdDuringWake: true + } + }); + + return { sandbox, serverUrl }; +} + async function wakeInstanceInternal( ctx: ActionCtx, instanceId: Id<'instances'> ): Promise<{ serverUrl: string }> { const instance = await requireInstance(ctx, instanceId); - if (!instance.sandboxId) { - throw new Error('Instance does not have a sandbox to wake'); - } - const wakeStartedAt = Date.now(); await ctx.scheduler.runAfter(0, internal.analytics.trackEvent, { @@ -417,7 +458,7 @@ async function wakeInstanceInternal( event: AnalyticsEvents.SANDBOX_WAKE_STARTED, properties: { instanceId, - sandboxId: instance.sandboxId + sandboxId: instance.sandboxId ?? 
null } }); @@ -427,13 +468,23 @@ async function wakeInstanceInternal( }); try { - const resources = await getResourceConfigs(ctx, instanceId); - const daytona = getDaytona(); - const sandbox = await daytona.get(instance.sandboxId); + let serverUrl: string; + let sandboxId: string; - await ensureSandboxStarted(sandbox); - await uploadBtcaConfig(sandbox, resources); - const serverUrl = await startBtcaServer(sandbox); + if (!instance.sandboxId) { + const result = await createSandboxFromScratch(ctx, instanceId, instance); + serverUrl = result.serverUrl; + sandboxId = result.sandbox.id; + } else { + const resources = await getResourceConfigs(ctx, instanceId); + const daytona = getDaytona(); + const sandbox = await daytona.get(instance.sandboxId); + + await ensureSandboxStarted(sandbox); + await uploadBtcaConfig(sandbox, resources); + serverUrl = await startBtcaServer(sandbox); + sandboxId = instance.sandboxId; + } await ctx.runMutation(instanceMutations.setServerUrl, { instanceId, serverUrl }); await ctx.runMutation(instanceMutations.updateState, { instanceId, state: 'running' }); @@ -445,8 +496,9 @@ async function wakeInstanceInternal( event: AnalyticsEvents.SANDBOX_WOKE, properties: { instanceId, - sandboxId: instance.sandboxId, - durationMs + sandboxId, + durationMs, + createdNewSandbox: !instance.sandboxId } }); diff --git a/apps/web/src/convex/instances/queries.ts b/apps/web/src/convex/instances/queries.ts index e8c7e5a8..7562efaf 100644 --- a/apps/web/src/convex/instances/queries.ts +++ b/apps/web/src/convex/instances/queries.ts @@ -67,6 +67,20 @@ export const getByClerkId = query({ } }); +/** + * Internal query to get instance by Clerk ID + * Used by API key validation when we have the Clerk user ID but no auth context + */ +export const getByClerkIdInternal = internalQuery({ + args: { clerkId: v.string() }, + handler: async (ctx, args) => { + return await ctx.db + .query('instances') + .withIndex('by_clerk_id', (q) => q.eq('clerkId', args.clerkId)) + .first(); + 
} +}); + /** * Get instance status for the authenticated user */ diff --git a/apps/web/src/convex/mcp.ts b/apps/web/src/convex/mcp.ts index 008a3fa2..2fac111e 100644 --- a/apps/web/src/convex/mcp.ts +++ b/apps/web/src/convex/mcp.ts @@ -2,36 +2,148 @@ import { v } from 'convex/values'; -import { api } from './_generated/api'; +import { api, internal } from './_generated/api'; +import type { Id } from './_generated/dataModel'; import { action } from './_generated/server'; import { instances } from './apiHelpers'; +import type { ApiKeyValidationResult } from './clerkApiKeys'; const instanceActions = instances.actions; const instanceMutations = instances.mutations; type AskResult = { ok: true; text: string } | { ok: false; error: string }; +function stripJsonComments(content: string): string { + let result = ''; + let inString = false; + let inLineComment = false; + let inBlockComment = false; + let i = 0; + + while (i < content.length) { + const char = content[i]; + const next = content[i + 1]; + + if (inLineComment) { + if (char === '\n') { + inLineComment = false; + result += char; + } + i++; + continue; + } + + if (inBlockComment) { + if (char === '*' && next === '/') { + inBlockComment = false; + i += 2; + continue; + } + i++; + continue; + } + + if (inString) { + result += char; + if (char === '\\' && i + 1 < content.length) { + result += content[i + 1]; + i += 2; + continue; + } + if (char === '"') { + inString = false; + } + i++; + continue; + } + + if (char === '"') { + inString = true; + result += char; + i++; + continue; + } + + if (char === '/' && next === '/') { + inLineComment = true; + i += 2; + continue; + } + + if (char === '/' && next === '*') { + inBlockComment = true; + i += 2; + continue; + } + + result += char; + i++; + } + + return result.replace(/,(\s*[}\]])/g, '$1'); +} + +/** + * Get or create a project by name for an instance. + * If project name is not provided or is "default", returns/creates the default project. 
+ */ +async function getOrCreateProject( + ctx: { + runQuery: (typeof action)['prototype']['runQuery']; + runMutation: (typeof action)['prototype']['runMutation']; + }, + instanceId: Id<'instances'>, + projectName?: string +): Promise> { + const name = projectName || 'default'; + + // Try to find existing project + const existing = await ctx.runQuery(internal.projects.getByInstanceAndName, { + instanceId, + name + }); + + if (existing) { + return existing._id; + } + + // Create the project (this handles the default case specially) + const isDefault = name === 'default'; + const projectId = await ctx.runMutation(internal.mcpInternal.createProjectInternal, { + instanceId, + name, + isDefault + }); + + return projectId; +} + /** * MCP ask action - called from the SvelteKit MCP endpoint. * Authentication is done via API key - the caller must provide a valid API key * which is validated here to get the instanceId. + * + * @param project - Optional project name. Defaults to "default" for backward compatibility. 
*/ export const ask = action({ args: { apiKey: v.string(), question: v.string(), - resources: v.array(v.string()) + resources: v.array(v.string()), + project: v.optional(v.string()) }, handler: async (ctx, args): Promise => { - const { apiKey, question, resources } = args; + const { apiKey, question, resources, project: projectName } = args; - // Validate API key and get instanceId - const validation = await ctx.runQuery(api.apiKeys.validate, { apiKey }); + // Validate API key with Clerk + const validation = (await ctx.runAction(api.clerkApiKeys.validate, { + apiKey + })) as ApiKeyValidationResult; if (!validation.valid) { return { ok: false as const, error: validation.error }; } - const instanceId = validation.userId; + const instanceId = validation.instanceId; // Get instance const instance = await ctx.runQuery(instances.internalQueries.getInternal, { id: instanceId }); @@ -39,9 +151,13 @@ export const ask = action({ return { ok: false as const, error: 'Instance not found' }; } - // Touch API key usage - await ctx.runMutation(api.apiKeys.touchLastUsed, { keyId: validation.keyId }); + // Get or create the project + const projectId = await getOrCreateProject(ctx, instanceId, projectName); + + // Note: Usage tracking is handled in the validate action via touchUsage + // For now, resources are still at instance level (backward compatible) + // In Phase 5, this will be updated to filter by project const availableResources: { global: { name: string }[]; custom: { name: string }[]; @@ -81,12 +197,17 @@ export const ask = action({ } } + // Pass project name to sandbox for future project-aware directory structure + // For now, "default" project uses root config, other projects will use project subdirectories + const effectiveProjectName = projectName || 'default'; + const response = await fetch(`${serverUrl}/question`, { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ question, resources, + project: effectiveProjectName, quiet: true 
}) }); @@ -96,13 +217,22 @@ export const ask = action({ return { ok: false as const, error: errorText || `Server error: ${response.status}` }; } - const result = (await response.json()) as { text?: string }; + const result = (await response.json()) as { answer?: string; text?: string }; + const answerText = result.answer ?? result.text ?? JSON.stringify(result); + + // Record the question/answer for the project + await ctx.runMutation(internal.mcpInternal.recordQuestion, { + projectId, + question, + resources, + answer: answerText + }); await ctx.runMutation(instanceMutations.touchActivity, { instanceId }); return { ok: true as const, - text: result.text ?? JSON.stringify(result) + text: answerText }; } }); @@ -125,24 +255,299 @@ type ListResourcesResult = /** * List available resources for MCP - authenticated via API key + * + * @param project - Optional project name. Defaults to "default" for backward compatibility. + * Currently returns instance-level resources; project-scoped resources come in Phase 5. 
*/ export const listResources = action({ args: { - apiKey: v.string() + apiKey: v.string(), + project: v.optional(v.string()) }, handler: async (ctx, args): Promise => { const { apiKey } = args; - // Validate API key and get instanceId - const validation = await ctx.runQuery(api.apiKeys.validate, { apiKey }); + // Validate API key with Clerk + const validation = (await ctx.runAction(api.clerkApiKeys.validate, { + apiKey + })) as ApiKeyValidationResult; if (!validation.valid) { return { ok: false as const, error: validation.error }; } - const instanceId = validation.userId; + const instanceId = validation.instanceId; + // For now, resources are at instance level (backward compatible) + // In Phase 5, this will be updated to filter by project const { custom } = await ctx.runQuery(api.resources.listAvailableInternal, { instanceId }); return { ok: true as const, resources: custom }; } }); + +type AddResourceResult = + | { ok: false; error: string } + | { + ok: true; + resource: { + name: string; + displayName: string; + type: string; + url: string; + branch: string; + searchPath: string | undefined; + specialNotes: string | undefined; + }; + }; + +/** + * Add a resource via MCP - authenticated via API key + */ +export const addResource = action({ + args: { + apiKey: v.string(), + url: v.string(), + name: v.string(), + branch: v.string(), + searchPath: v.optional(v.string()), + searchPaths: v.optional(v.array(v.string())), + notes: v.optional(v.string()), + project: v.optional(v.string()) + }, + handler: async (ctx, args): Promise => { + const { + apiKey, + url, + name, + branch, + searchPath, + searchPaths, + notes, + project: projectName + } = args; + + // Validate API key with Clerk + const validation = (await ctx.runAction(api.clerkApiKeys.validate, { + apiKey + })) as ApiKeyValidationResult; + if (!validation.valid) { + return { ok: false as const, error: validation.error }; + } + + const instanceId = validation.instanceId; + + // Get or create the project + const 
projectId = await getOrCreateProject(ctx, instanceId, projectName); + + // Note: Usage tracking is handled in the validate action via touchUsage + + // Validate URL (basic check) + if (!url.startsWith('https://')) { + return { ok: false as const, error: 'URL must be an HTTPS URL' }; + } + + // Check if resource with this name already exists + const { custom } = await ctx.runQuery(api.resources.listAvailableInternal, { instanceId }); + if (custom.some((r) => r.name.toLowerCase() === name.toLowerCase())) { + return { ok: false as const, error: `Resource "${name}" already exists` }; + } + + // Add the resource + const finalSearchPath = searchPath ?? searchPaths?.[0]; + await ctx.runMutation(internal.mcpInternal.addResourceInternal, { + instanceId, + projectId, + name, + url, + branch, + searchPath: finalSearchPath, + specialNotes: notes + }); + + return { + ok: true as const, + resource: { + name, + displayName: name, + type: 'git', + url, + branch, + searchPath: finalSearchPath, + specialNotes: notes + } + }; + } +}); + +type SyncResult = { + ok: boolean; + errors?: string[]; + synced: string[]; + conflicts?: Array<{ + name: string; + local: { url: string; branch: string }; + remote: { url: string; branch: string }; + }>; +}; + +/** + * Sync remote config with cloud - authenticated via API key + */ +export const sync = action({ + args: { + apiKey: v.string(), + config: v.string(), + force: v.boolean() + }, + handler: async (ctx, args): Promise => { + const { apiKey, config: configStr, force } = args; + + // Validate API key with Clerk + const validation = (await ctx.runAction(api.clerkApiKeys.validate, { + apiKey + })) as ApiKeyValidationResult; + if (!validation.valid) { + return { ok: false, errors: [validation.error], synced: [] }; + } + + const instanceId = validation.instanceId; + + // Note: Usage tracking is handled in the validate action via touchUsage + + // Parse the config + let config: { + project: string; + model?: string; + resources: Array<{ + type?: 
string; + name: string; + url: string; + branch: string; + searchPath?: string; + searchPaths?: string[]; + specialNotes?: string; + }>; + }; + + try { + const stripped = stripJsonComments(configStr); + config = JSON.parse(stripped); + } catch (e) { + const errorMsg = e instanceof Error ? e.message : 'Unknown parse error'; + return { + ok: false, + errors: [`Invalid JSON in config: ${errorMsg}`], + synced: [] + }; + } + + if (!config.project || typeof config.project !== 'string') { + return { ok: false, errors: ['Missing or invalid "project" field in config (must be a string)'], synced: [] }; + } + + if (!Array.isArray(config.resources)) { + return { ok: false, errors: ['Missing or invalid "resources" field in config (must be an array)'], synced: [] }; + } + + const resourceErrors: string[] = []; + for (let i = 0; i < config.resources.length; i++) { + const r = config.resources[i]; + if (!r || typeof r !== 'object') { + resourceErrors.push(`resources[${i}]: must be an object`); + continue; + } + if (!r.name || typeof r.name !== 'string') { + resourceErrors.push(`resources[${i}]: missing or invalid "name" (must be a string)`); + } + if (!r.url || typeof r.url !== 'string') { + resourceErrors.push(`resources[${i}]: missing or invalid "url" (must be a string)`); + } + if (!r.branch || typeof r.branch !== 'string') { + resourceErrors.push(`resources[${i}]: missing or invalid "branch" (must be a string)`); + } + } + + if (resourceErrors.length > 0) { + return { ok: false, errors: resourceErrors, synced: [] }; + } + + // Get or create the project + const projectId = await getOrCreateProject(ctx, instanceId, config.project); + + // Get current resources + const { custom: existingResources } = await ctx.runQuery(api.resources.listAvailableInternal, { + instanceId + }); + + const synced: string[] = []; + const errors: string[] = []; + const conflicts: SyncResult['conflicts'] = []; + + // Process each resource in the config + for (const localResource of config.resources) { + 
const existingResource = existingResources.find( + (r) => r.name.toLowerCase() === localResource.name.toLowerCase() + ); + + if (existingResource) { + // Check for conflicts + const urlMatch = existingResource.url === localResource.url; + const branchMatch = existingResource.branch === localResource.branch; + + if (!urlMatch || !branchMatch) { + if (force) { + // Update the resource + await ctx.runMutation(internal.mcpInternal.updateResourceInternal, { + instanceId, + name: localResource.name, + url: localResource.url, + branch: localResource.branch, + searchPath: localResource.searchPath ?? localResource.searchPaths?.[0], + specialNotes: localResource.specialNotes + }); + synced.push(localResource.name); + } else { + conflicts.push({ + name: localResource.name, + local: { url: localResource.url, branch: localResource.branch }, + remote: { url: existingResource.url, branch: existingResource.branch } + }); + } + } + // If they match, nothing to do + } else { + // Add new resource + try { + await ctx.runMutation(internal.mcpInternal.addResourceInternal, { + instanceId, + projectId, + name: localResource.name, + url: localResource.url, + branch: localResource.branch, + searchPath: localResource.searchPath ?? localResource.searchPaths?.[0], + specialNotes: localResource.specialNotes + }); + synced.push(localResource.name); + } catch (err) { + errors.push( + `Failed to add "${localResource.name}": ${err instanceof Error ? err.message : String(err)}` + ); + } + } + } + + // Update project model if specified + if (config.model) { + await ctx.runMutation(internal.mcpInternal.updateProjectModelInternal, { + projectId, + model: config.model + }); + } + + if (conflicts.length > 0) { + return { ok: false, errors, synced, conflicts }; + } + + return { ok: errors.length === 0, errors: errors.length > 0 ? 
errors : undefined, synced }; + } +}); diff --git a/apps/web/src/convex/mcpInternal.ts b/apps/web/src/convex/mcpInternal.ts new file mode 100644 index 00000000..8f3d2e2b --- /dev/null +++ b/apps/web/src/convex/mcpInternal.ts @@ -0,0 +1,142 @@ +import { v } from 'convex/values'; + +import type { Id } from './_generated/dataModel'; +import { internalMutation } from './_generated/server'; + +/** + * Internal mutation to create a project (used by MCP to avoid auth requirements) + */ +export const createProjectInternal = internalMutation({ + args: { + instanceId: v.id('instances'), + name: v.string(), + isDefault: v.boolean() + }, + handler: async (ctx, args): Promise> => { + // Double-check it doesn't exist (race condition protection) + const existing = await ctx.db + .query('projects') + .withIndex('by_instance_and_name', (q) => + q.eq('instanceId', args.instanceId).eq('name', args.name) + ) + .first(); + + if (existing) { + return existing._id; + } + + return await ctx.db.insert('projects', { + instanceId: args.instanceId, + name: args.name, + isDefault: args.isDefault, + createdAt: Date.now() + }); + } +}); + +/** + * Internal mutation to record an MCP question + */ +export const recordQuestion = internalMutation({ + args: { + projectId: v.id('projects'), + question: v.string(), + resources: v.array(v.string()), + answer: v.string() + }, + handler: async (ctx, args) => { + return await ctx.db.insert('mcpQuestions', { + projectId: args.projectId, + question: args.question, + resources: args.resources, + answer: args.answer, + createdAt: Date.now() + }); + } +}); + +/** + * Internal mutation to add a resource (used by MCP to avoid auth requirements) + */ +export const addResourceInternal = internalMutation({ + args: { + instanceId: v.id('instances'), + projectId: v.id('projects'), + name: v.string(), + url: v.string(), + branch: v.string(), + searchPath: v.optional(v.string()), + specialNotes: v.optional(v.string()) + }, + handler: async (ctx, args): Promise> => { + 
// Check if resource with this name already exists for this instance + const existing = await ctx.db + .query('userResources') + .withIndex('by_instance', (q) => q.eq('instanceId', args.instanceId)) + .filter((q) => q.eq(q.field('name'), args.name)) + .first(); + + if (existing) { + throw new Error(`Resource "${args.name}" already exists`); + } + + return await ctx.db.insert('userResources', { + instanceId: args.instanceId, + projectId: args.projectId, + name: args.name, + type: 'git', + url: args.url, + branch: args.branch, + searchPath: args.searchPath, + specialNotes: args.specialNotes, + createdAt: Date.now() + }); + } +}); + +/** + * Internal mutation to update a resource (used by MCP sync) + */ +export const updateResourceInternal = internalMutation({ + args: { + instanceId: v.id('instances'), + name: v.string(), + url: v.string(), + branch: v.string(), + searchPath: v.optional(v.string()), + specialNotes: v.optional(v.string()) + }, + handler: async (ctx, args) => { + const existing = await ctx.db + .query('userResources') + .withIndex('by_instance', (q) => q.eq('instanceId', args.instanceId)) + .filter((q) => q.eq(q.field('name'), args.name)) + .first(); + + if (!existing) { + throw new Error(`Resource "${args.name}" not found`); + } + + await ctx.db.patch(existing._id, { + url: args.url, + branch: args.branch, + searchPath: args.searchPath, + specialNotes: args.specialNotes + }); + } +}); + +/** + * Internal mutation to update a project's model (used by MCP sync) + */ +export const updateProjectModelInternal = internalMutation({ + args: { + projectId: v.id('projects'), + model: v.string() + }, + handler: async (ctx, args) => { + await ctx.db.patch(args.projectId, { + model: args.model + }); + } +}); diff --git a/apps/web/src/convex/mcpQuestions.ts b/apps/web/src/convex/mcpQuestions.ts new file mode 100644 index 00000000..0a4f016d --- /dev/null +++ b/apps/web/src/convex/mcpQuestions.ts @@ -0,0 +1,91 @@ +import { v } from 'convex/values'; + +import { query } 
from './_generated/server'; +import { getAuthenticatedInstance } from './authHelpers'; + +/** + * List MCP questions for a project. + * Returns questions in reverse chronological order (newest first). + */ +export const list = query({ + args: { + projectId: v.id('projects'), + limit: v.optional(v.number()) + }, + handler: async (ctx, args) => { + const instance = await getAuthenticatedInstance(ctx); + const limit = args.limit ?? 50; + + // Verify the project belongs to this instance + const project = await ctx.db.get(args.projectId); + if (!project || project.instanceId !== instance._id) { + throw new Error('Project not found'); + } + + const questions = await ctx.db + .query('mcpQuestions') + .withIndex('by_project', (q) => q.eq('projectId', args.projectId)) + .take(limit); + + // Sort by createdAt descending (newest first) + return questions.sort((a, b) => b.createdAt - a.createdAt); + } +}); + +/** + * List MCP questions for the default project of the authenticated user's instance. + * Convenience method when no specific project is specified. + */ +export const listForDefaultProject = query({ + args: { + limit: v.optional(v.number()) + }, + handler: async (ctx, args) => { + const instance = await getAuthenticatedInstance(ctx); + const limit = args.limit ?? 
50; + + // Find the default project + const defaultProject = await ctx.db + .query('projects') + .withIndex('by_instance_and_name', (q) => + q.eq('instanceId', instance._id).eq('name', 'default') + ) + .first(); + + if (!defaultProject) { + // No default project yet, return empty list + return []; + } + + const questions = await ctx.db + .query('mcpQuestions') + .withIndex('by_project', (q) => q.eq('projectId', defaultProject._id)) + .take(limit); + + // Sort by createdAt descending (newest first) + return questions.sort((a, b) => b.createdAt - a.createdAt); + } +}); + +/** + * Get a specific MCP question by ID (requires ownership through project -> instance) + */ +export const get = query({ + args: { questionId: v.id('mcpQuestions') }, + handler: async (ctx, args) => { + const instance = await getAuthenticatedInstance(ctx); + + const question = await ctx.db.get(args.questionId); + if (!question) { + return null; + } + + // Verify ownership through project + const project = await ctx.db.get(question.projectId); + if (!project || project.instanceId !== instance._id) { + return null; + } + + return question; + } +}); diff --git a/apps/web/src/convex/migrations.ts b/apps/web/src/convex/migrations.ts new file mode 100644 index 00000000..ddcf3921 --- /dev/null +++ b/apps/web/src/convex/migrations.ts @@ -0,0 +1,217 @@ +import { Migrations } from '@convex-dev/migrations'; + +import { components, internal } from './_generated/api'; +import type { DataModel, Id } from './_generated/dataModel'; +import { internalMutation, internalQuery } from './_generated/server'; + +// Initialize the migrations component +export const migrations = new Migrations(components.migrations); + +// Runner for CLI/dashboard access +export const run = migrations.runner(); + +/** + * Migration: Add projectId to threads that don't have one. + * + * For each thread without a projectId, finds or creates the default project + * for its instance, then sets the projectId. 
+ */ +export const migrateThreadsToProject = migrations.define({ + table: 'threads', + customRange: (query) => query.filter((q) => q.eq(q.field('projectId'), undefined)), + batchSize: 50, + migrateOne: async (ctx, thread) => { + // Get or create the default project for this instance + const projectId = await getOrCreateDefaultProject(ctx, thread.instanceId); + return { projectId }; + } +}); + +/** + * Migration: Add projectId to userResources that don't have one. + */ +export const migrateUserResourcesToProject = migrations.define({ + table: 'userResources', + customRange: (query) => query.filter((q) => q.eq(q.field('projectId'), undefined)), + batchSize: 50, + migrateOne: async (ctx, resource) => { + const projectId = await getOrCreateDefaultProject(ctx, resource.instanceId); + return { projectId }; + } +}); + +/** + * Migration: Add projectId to cachedResources that don't have one. + */ +export const migrateCachedResourcesToProject = migrations.define({ + table: 'cachedResources', + customRange: (query) => query.filter((q) => q.eq(q.field('projectId'), undefined)), + batchSize: 50, + migrateOne: async (ctx, resource) => { + const projectId = await getOrCreateDefaultProject(ctx, resource.instanceId); + return { projectId }; + } +}); + +/** + * Helper: Get or create the default project for an instance. + * Caches results within a single migration batch. 
+ */ +const projectCache = new Map>(); + +async function getOrCreateDefaultProject( + ctx: { + db: { + query: (table: 'projects') => any; + insert: (table: 'projects', doc: any) => Promise>; + }; + }, + instanceId: Id<'instances'> +): Promise> { + // Check cache first + const cached = projectCache.get(instanceId); + if (cached) return cached; + + // Look for existing default project + const existing = await ctx.db + .query('projects') + .withIndex('by_instance_and_name', (q: any) => + q.eq('instanceId', instanceId).eq('name', 'default') + ) + .first(); + + if (existing) { + projectCache.set(instanceId, existing._id); + return existing._id; + } + + // Create new default project + const projectId = await ctx.db.insert('projects', { + instanceId, + name: 'default', + isDefault: true, + createdAt: Date.now() + }); + + projectCache.set(instanceId, projectId); + return projectId; +} + +/** + * Runner for all Phase 3 migrations in sequence. + * Run with: npx convex run migrations:runAll + */ +export const runAll = migrations.runner([ + internal.migrations.migrateThreadsToProject, + internal.migrations.migrateUserResourcesToProject, + internal.migrations.migrateCachedResourcesToProject +]); + +// ============================================================================ +// Status and utility functions +// ============================================================================ + +/** + * Get the current migration status. + * Shows counts of records that still need migration. 
+ */ +export const getMigrationStatus = internalQuery({ + args: {}, + handler: async (ctx) => { + const totalInstances = (await ctx.db.query('instances').collect()).length; + const totalProjects = (await ctx.db.query('projects').collect()).length; + + // Count records without projectId + const threadsWithoutProject = ( + await ctx.db + .query('threads') + .filter((q) => q.eq(q.field('projectId'), undefined)) + .collect() + ).length; + + const userResourcesWithoutProject = ( + await ctx.db + .query('userResources') + .filter((q) => q.eq(q.field('projectId'), undefined)) + .collect() + ).length; + + const cachedResourcesWithoutProject = ( + await ctx.db + .query('cachedResources') + .filter((q) => q.eq(q.field('projectId'), undefined)) + .collect() + ).length; + + return { + totalInstances, + totalProjects, + threadsWithoutProject, + userResourcesWithoutProject, + cachedResourcesWithoutProject, + migrationComplete: + threadsWithoutProject === 0 && + userResourcesWithoutProject === 0 && + cachedResourcesWithoutProject === 0 + }; + } +}); + +/** + * Get instances that don't have a default project yet. + */ +export const getInstancesWithoutDefaultProject = internalQuery({ + args: {}, + handler: async (ctx) => { + const instances = await ctx.db.query('instances').collect(); + const results: Id<'instances'>[] = []; + + for (const instance of instances) { + const defaultProject = await ctx.db + .query('projects') + .withIndex('by_instance_and_name', (q) => + q.eq('instanceId', instance._id).eq('name', 'default') + ) + .first(); + + if (!defaultProject) { + results.push(instance._id); + } + } + + return results; + } +}); + +/** + * Create default projects for all instances that don't have one. + * This can be run before or as part of the main migration. 
+ */ +export const createMissingDefaultProjects = internalMutation({ + args: {}, + handler: async (ctx) => { + const instances = await ctx.db.query('instances').collect(); + let created = 0; + + for (const instance of instances) { + const existing = await ctx.db + .query('projects') + .withIndex('by_instance_and_name', (q) => + q.eq('instanceId', instance._id).eq('name', 'default') + ) + .first(); + + if (!existing) { + await ctx.db.insert('projects', { + instanceId: instance._id, + name: 'default', + isDefault: true, + createdAt: Date.now() + }); + created++; + } + } + + return { created, total: instances.length }; + } +}); diff --git a/apps/web/src/convex/projects.ts b/apps/web/src/convex/projects.ts new file mode 100644 index 00000000..a815c8f5 --- /dev/null +++ b/apps/web/src/convex/projects.ts @@ -0,0 +1,352 @@ +import { v } from 'convex/values'; + +import { internal } from './_generated/api'; +import type { Id } from './_generated/dataModel'; +import { internalQuery, mutation, query } from './_generated/server'; +import { AnalyticsEvents } from './analyticsEvents'; +import { getAuthenticatedInstance } from './authHelpers'; + +/** + * List all projects for the authenticated user's instance + */ +export const list = query({ + args: {}, + handler: async (ctx) => { + const instance = await getAuthenticatedInstance(ctx); + + const projects = await ctx.db + .query('projects') + .withIndex('by_instance', (q) => q.eq('instanceId', instance._id)) + .collect(); + + return projects.sort((a, b) => { + // Default project always first + if (a.isDefault && !b.isDefault) return -1; + if (!a.isDefault && b.isDefault) return 1; + // Then by creation date (newest first) + return b.createdAt - a.createdAt; + }); + } +}); + +/** + * Get a project by name for the authenticated user's instance + */ +export const getByName = query({ + args: { name: v.string() }, + handler: async (ctx, args) => { + const instance = await getAuthenticatedInstance(ctx); + + return await ctx.db + 
.query('projects') + .withIndex('by_instance_and_name', (q) => + q.eq('instanceId', instance._id).eq('name', args.name) + ) + .first(); + } +}); + +/** + * Get a project by ID (requires ownership through instance) + */ +export const get = query({ + args: { projectId: v.id('projects') }, + handler: async (ctx, args) => { + const instance = await getAuthenticatedInstance(ctx); + const project = await ctx.db.get(args.projectId); + + if (!project || project.instanceId !== instance._id) { + return null; + } + + return project; + } +}); + +/** + * Get the default project for the authenticated user's instance. + * Creates one if it doesn't exist. + */ +export const getDefault = query({ + args: {}, + handler: async (ctx) => { + const instance = await getAuthenticatedInstance(ctx); + + const defaultProject = await ctx.db + .query('projects') + .withIndex('by_instance_and_name', (q) => + q.eq('instanceId', instance._id).eq('name', 'default') + ) + .first(); + + return defaultProject; + } +}); + +/** + * Create a new project for the authenticated user's instance + */ +export const create = mutation({ + args: { + name: v.string(), + model: v.optional(v.string()) + }, + handler: async (ctx, args) => { + const instance = await getAuthenticatedInstance(ctx); + + // Check if project with this name already exists + const existing = await ctx.db + .query('projects') + .withIndex('by_instance_and_name', (q) => + q.eq('instanceId', instance._id).eq('name', args.name) + ) + .first(); + + if (existing) { + throw new Error(`Project with name "${args.name}" already exists`); + } + + const projectId = await ctx.db.insert('projects', { + instanceId: instance._id, + name: args.name, + model: args.model, + isDefault: false, + createdAt: Date.now() + }); + + await ctx.scheduler.runAfter(0, internal.analytics.trackEvent, { + distinctId: instance.clerkId, + event: AnalyticsEvents.PROJECT_CREATED, + properties: { + instanceId: instance._id, + projectId, + projectName: args.name, + hasModel: 
!!args.model + } + }); + + return projectId; + } +}); + +/** + * Ensure the default project exists for an instance. + * Creates it if it doesn't exist. Idempotent. + */ +export const ensureDefault = mutation({ + args: {}, + handler: async (ctx): Promise> => { + const instance = await getAuthenticatedInstance(ctx); + + // Check if default project exists + const existing = await ctx.db + .query('projects') + .withIndex('by_instance_and_name', (q) => + q.eq('instanceId', instance._id).eq('name', 'default') + ) + .first(); + + if (existing) { + return existing._id; + } + + // Create the default project + const projectId = await ctx.db.insert('projects', { + instanceId: instance._id, + name: 'default', + isDefault: true, + createdAt: Date.now() + }); + + await ctx.scheduler.runAfter(0, internal.analytics.trackEvent, { + distinctId: instance.clerkId, + event: AnalyticsEvents.PROJECT_CREATED, + properties: { + instanceId: instance._id, + projectId, + projectName: 'default', + isDefault: true + } + }); + + return projectId; + } +}); + +/** + * Update a project's model setting + */ +export const updateModel = mutation({ + args: { + projectId: v.id('projects'), + model: v.optional(v.string()) + }, + handler: async (ctx, args) => { + const instance = await getAuthenticatedInstance(ctx); + const project = await ctx.db.get(args.projectId); + + if (!project || project.instanceId !== instance._id) { + throw new Error('Project not found'); + } + + await ctx.db.patch(args.projectId, { model: args.model }); + } +}); + +/** + * Delete a project (cannot delete the default project) + */ +export const remove = mutation({ + args: { projectId: v.id('projects') }, + handler: async (ctx, args) => { + const instance = await getAuthenticatedInstance(ctx); + const project = await ctx.db.get(args.projectId); + + if (!project || project.instanceId !== instance._id) { + throw new Error('Project not found'); + } + + if (project.isDefault) { + throw new Error('Cannot delete the default project'); + 
} + + // Delete all related threads + const threads = await ctx.db + .query('threads') + .withIndex('by_project', (q) => q.eq('projectId', args.projectId)) + .collect(); + + for (const thread of threads) { + // Delete messages for this thread + const messages = await ctx.db + .query('messages') + .withIndex('by_thread', (q) => q.eq('threadId', thread._id)) + .collect(); + + for (const message of messages) { + await ctx.db.delete(message._id); + } + + // Delete thread resources + const threadResources = await ctx.db + .query('threadResources') + .withIndex('by_thread', (q) => q.eq('threadId', thread._id)) + .collect(); + + for (const resource of threadResources) { + await ctx.db.delete(resource._id); + } + + await ctx.db.delete(thread._id); + } + + // Delete all related userResources + const userResources = await ctx.db + .query('userResources') + .withIndex('by_project', (q) => q.eq('projectId', args.projectId)) + .collect(); + + for (const resource of userResources) { + await ctx.db.delete(resource._id); + } + + // Delete all related cachedResources + const cachedResources = await ctx.db + .query('cachedResources') + .withIndex('by_project', (q) => q.eq('projectId', args.projectId)) + .collect(); + + for (const resource of cachedResources) { + await ctx.db.delete(resource._id); + } + + // Delete all related mcpQuestions + const mcpQuestions = await ctx.db + .query('mcpQuestions') + .withIndex('by_project', (q) => q.eq('projectId', args.projectId)) + .collect(); + + for (const question of mcpQuestions) { + await ctx.db.delete(question._id); + } + + // Finally delete the project + await ctx.db.delete(args.projectId); + + await ctx.scheduler.runAfter(0, internal.analytics.trackEvent, { + distinctId: instance.clerkId, + event: AnalyticsEvents.PROJECT_DELETED, + properties: { + instanceId: instance._id, + projectId: args.projectId, + projectName: project.name, + deletedThreads: threads.length, + deletedUserResources: userResources.length + } + }); + } +}); + +/** + * 
Internal query to get project by instance ID and name + * Used by MCP and other internal operations + */ +export const getByInstanceAndName = internalQuery({ + args: { + instanceId: v.id('instances'), + name: v.string() + }, + handler: async (ctx, args) => { + return await ctx.db + .query('projects') + .withIndex('by_instance_and_name', (q) => + q.eq('instanceId', args.instanceId).eq('name', args.name) + ) + .first(); + } +}); + +/** + * Internal query to get default project by instance ID + */ +export const getDefaultByInstance = internalQuery({ + args: { instanceId: v.id('instances') }, + handler: async (ctx, args) => { + return await ctx.db + .query('projects') + .withIndex('by_instance_and_name', (q) => + q.eq('instanceId', args.instanceId).eq('name', 'default') + ) + .first(); + } +}); + +/** + * List MCP questions for a project + */ +export const listQuestions = query({ + args: { projectId: v.id('projects') }, + handler: async (ctx, args) => { + const instance = await getAuthenticatedInstance(ctx); + const project = await ctx.db.get(args.projectId); + + if (!project || project.instanceId !== instance._id) { + return []; + } + + const questions = await ctx.db + .query('mcpQuestions') + .withIndex('by_project', (q) => q.eq('projectId', args.projectId)) + .collect(); + + return questions + .map((q) => ({ + _id: q._id, + question: q.question, + resources: q.resources, + answer: q.answer, + createdAt: q.createdAt + })) + .sort((a, b) => b.createdAt - a.createdAt); + } +}); diff --git a/apps/web/src/convex/resources.ts b/apps/web/src/convex/resources.ts index 84838df8..d83b2f68 100644 --- a/apps/web/src/convex/resources.ts +++ b/apps/web/src/convex/resources.ts @@ -19,17 +19,34 @@ export const listGlobal = query({ }); /** - * List user resources for the authenticated user's instance + * List user resources for the authenticated user's instance, optionally filtered by project */ export const listUserResources = query({ - args: {}, - handler: async (ctx) => { + args: 
{ + projectId: v.optional(v.id('projects')) + }, + handler: async (ctx, args) => { const instance = await getAuthenticatedInstance(ctx); - return await ctx.db + if (args.projectId) { + const resources = await ctx.db + .query('userResources') + .withIndex('by_project', (q) => q.eq('projectId', args.projectId)) + .collect(); + return resources.filter((r) => r.instanceId === instance._id); + } + + const allResources = await ctx.db .query('userResources') .withIndex('by_instance', (q) => q.eq('instanceId', instance._id)) .collect(); + + const seen = new Set(); + return allResources.filter((r) => { + if (seen.has(r.name)) return false; + seen.add(r.name); + return true; + }); } }); @@ -119,13 +136,15 @@ export const addCustomResource = mutation({ url: v.string(), branch: v.string(), searchPath: v.optional(v.string()), - specialNotes: v.optional(v.string()) + specialNotes: v.optional(v.string()), + projectId: v.optional(v.id('projects')) }, handler: async (ctx, args) => { const instance = await getAuthenticatedInstance(ctx); const resourceId = await ctx.db.insert('userResources', { instanceId: instance._id, + projectId: args.projectId, name: args.name, type: 'git', url: args.url, diff --git a/apps/web/src/convex/schema.ts b/apps/web/src/convex/schema.ts index c7bfd003..eeba023b 100644 --- a/apps/web/src/convex/schema.ts +++ b/apps/web/src/convex/schema.ts @@ -72,28 +72,28 @@ export default defineSchema({ .index('by_clerk_id', ['clerkId']) .index('by_sandbox_id', ['sandboxId']), + projects: defineTable({ + instanceId: v.id('instances'), + name: v.string(), + model: v.optional(v.string()), + isDefault: v.boolean(), + createdAt: v.number() + }) + .index('by_instance', ['instanceId']) + .index('by_instance_and_name', ['instanceId', 'name']), + cachedResources: defineTable({ instanceId: v.id('instances'), + projectId: v.optional(v.id('projects')), name: v.string(), url: v.string(), branch: v.string(), sizeBytes: v.optional(v.number()), cachedAt: v.number(), lastUsedAt: 
v.number() - }).index('by_instance', ['instanceId']), - - apiKeys: defineTable({ - instanceId: v.id('instances'), - name: v.string(), - keyHash: v.string(), - keyPrefix: v.string(), - createdAt: v.number(), - lastUsedAt: v.optional(v.number()), - revokedAt: v.optional(v.number()), - usageCount: v.optional(v.number()) }) .index('by_instance', ['instanceId']) - .index('by_key_hash', ['keyHash']), + .index('by_project', ['projectId']), globalResources: defineTable({ name: v.string(), @@ -108,6 +108,7 @@ export default defineSchema({ userResources: defineTable({ instanceId: v.id('instances'), + projectId: v.optional(v.id('projects')), name: v.string(), type: v.literal('git'), url: v.string(), @@ -115,14 +116,19 @@ export default defineSchema({ searchPath: v.optional(v.string()), specialNotes: v.optional(v.string()), createdAt: v.number() - }).index('by_instance', ['instanceId']), + }) + .index('by_instance', ['instanceId']) + .index('by_project', ['projectId']), threads: defineTable({ instanceId: v.id('instances'), + projectId: v.optional(v.id('projects')), title: v.optional(v.string()), createdAt: v.number(), lastActivityAt: v.number() - }).index('by_instance', ['instanceId']), + }) + .index('by_instance', ['instanceId']) + .index('by_project', ['projectId']), messages: defineTable({ threadId: v.id('threads'), @@ -150,5 +156,25 @@ export default defineSchema({ .index('by_thread', ['threadId']) .index('by_message', ['messageId']) .index('by_session', ['sessionId']) - .index('by_status', ['status']) + .index('by_status', ['status']), + + mcpQuestions: defineTable({ + projectId: v.id('projects'), + question: v.string(), + resources: v.array(v.string()), + answer: v.string(), + createdAt: v.number() + }).index('by_project', ['projectId']), + + apiKeyUsage: defineTable({ + clerkApiKeyId: v.string(), // "ak_xxx" from Clerk + clerkUserId: v.string(), // "user_xxx" - the subject from Clerk + instanceId: v.id('instances'), + name: v.optional(v.string()), // Cached name for 
display + lastUsedAt: v.optional(v.number()), + usageCount: v.number(), + createdAt: v.number() + }) + .index('by_clerk_api_key_id', ['clerkApiKeyId']) + .index('by_instance', ['instanceId']) }); diff --git a/apps/web/src/convex/threads.ts b/apps/web/src/convex/threads.ts index 4d6119e0..2fbe9a03 100644 --- a/apps/web/src/convex/threads.ts +++ b/apps/web/src/convex/threads.ts @@ -6,17 +6,28 @@ import { AnalyticsEvents } from './analyticsEvents'; import { getAuthenticatedInstance, requireThreadOwnership } from './authHelpers'; /** - * List threads for the authenticated user's instance + * List threads for the authenticated user's instance, optionally filtered by project */ export const list = query({ - args: {}, - handler: async (ctx) => { + args: { + projectId: v.optional(v.id('projects')) + }, + handler: async (ctx, args) => { const instance = await getAuthenticatedInstance(ctx); - const threads = await ctx.db - .query('threads') - .withIndex('by_instance', (q) => q.eq('instanceId', instance._id)) - .collect(); + let threads; + if (args.projectId) { + threads = await ctx.db + .query('threads') + .withIndex('by_project', (q) => q.eq('projectId', args.projectId)) + .collect(); + threads = threads.filter((t) => t.instanceId === instance._id); + } else { + threads = await ctx.db + .query('threads') + .withIndex('by_instance', (q) => q.eq('instanceId', instance._id)) + .collect(); + } const activeStreamSessions = await ctx.db .query('streamSessions') @@ -79,13 +90,15 @@ export const getWithMessages = query({ */ export const create = mutation({ args: { - title: v.optional(v.string()) + title: v.optional(v.string()), + projectId: v.optional(v.id('projects')) }, handler: async (ctx, args) => { const instance = await getAuthenticatedInstance(ctx); const threadId = await ctx.db.insert('threads', { instanceId: instance._id, + projectId: args.projectId, title: args.title, createdAt: Date.now(), lastActivityAt: Date.now() diff --git a/apps/web/src/convex/usage.ts 
b/apps/web/src/convex/usage.ts index bf4a9520..21b18110 100644 --- a/apps/web/src/convex/usage.ts +++ b/apps/web/src/convex/usage.ts @@ -716,7 +716,7 @@ export const createCheckoutSession = action({ } return { - url: attachPayload.data?.checkout_url ?? `${args.baseUrl}/checkout/success` + url: attachPayload.data?.checkout_url ?? `${args.baseUrl}/app/checkout/success` }; } }); @@ -756,7 +756,7 @@ export const createBillingPortalSession = action({ } return { - url: payload.data?.url ?? `${args.baseUrl}/settings/billing` + url: payload.data?.url ?? `${args.baseUrl}/app/settings/billing` }; } }); diff --git a/apps/web/src/lib/components/CreateProjectModal.svelte b/apps/web/src/lib/components/CreateProjectModal.svelte new file mode 100644 index 00000000..73aac4ee --- /dev/null +++ b/apps/web/src/lib/components/CreateProjectModal.svelte @@ -0,0 +1,92 @@ + + +{#if projectStore.showCreateModal} + +{/if} diff --git a/apps/web/src/lib/components/ProjectSelector.svelte b/apps/web/src/lib/components/ProjectSelector.svelte new file mode 100644 index 00000000..3ebc3f7f --- /dev/null +++ b/apps/web/src/lib/components/ProjectSelector.svelte @@ -0,0 +1,170 @@ + + + + +
+ + + {#if isOpen} +
+ {#each projectStore.projects as project (project._id)} + + {/each} + +
+ + +
+ {/if} +
+ +{#if showCreateModal} + + +{/if} diff --git a/apps/web/src/lib/components/Sidebar.svelte b/apps/web/src/lib/components/Sidebar.svelte index 35d55f14..ae8723a2 100644 --- a/apps/web/src/lib/components/Sidebar.svelte +++ b/apps/web/src/lib/components/Sidebar.svelte @@ -2,12 +2,11 @@ import { BookOpen, Bot, - CreditCard, - Github, - Home, - Key, + Check, + ChevronDown, + FolderOpen, Loader2, - Menu, + MessageSquare, Moon, Plus, Search, @@ -21,9 +20,9 @@ import { createEventDispatcher } from 'svelte'; import { useConvexClient } from 'convex-svelte'; import { api } from '../../convex/_generated/api'; - import { getAuthState, openSignIn, openUserProfile, signOut } from '$lib/stores/auth.svelte'; + import { getAuthState, openSignIn, signOut } from '$lib/stores/auth.svelte'; import { getThemeStore } from '$lib/stores/theme.svelte'; - import { getBillingStore } from '$lib/stores/billing.svelte'; + import { getProjectStore } from '$lib/stores/project.svelte'; import InstanceStatus from '$lib/components/InstanceStatus.svelte'; import { trackEvent, ClientAnalyticsEvents } from '$lib/stores/analytics.svelte'; @@ -45,12 +44,13 @@ const dispatch = createEventDispatcher<{ close: void }>(); const auth = getAuthState(); - const billingStore = getBillingStore(); const themeStore = getThemeStore(); + const projectStore = getProjectStore(); const client = useConvexClient(); let searchValue = $state(''); let showUserMenu = $state(false); + let showProjectsSection = $state(true); const filteredThreads = $derived.by(() => { const query = searchValue.trim().toLowerCase(); @@ -87,6 +87,7 @@ function toggleTheme() { themeStore.toggle(); + showUserMenu = false; } function handleSignOut() { @@ -101,6 +102,14 @@ showUserMenu = false; } } + + async function selectProject(projectId: string) { + await projectStore.selectProjectWithNavigation(projectId as any); + } + + function openCreateProjectModal() { + projectStore.showCreateModal = true; + } @@ -119,14 +128,76 @@
-
- - - Resources - -
+ + + {#if showProjectsSection} +
+ {#if projectStore.isLoading} +
+ + Loading... +
+ {:else} + {#each projectStore.projects as project (project._id)} + + {/each} + + {/if} +
+ {/if} +
+ +
+
@@ -206,7 +277,31 @@ {/if}
-