diff --git a/.gitignore b/.gitignore
index ef38a4a..2cfd108 100644
--- a/.gitignore
+++ b/.gitignore
@@ -43,5 +43,9 @@ disassemble.py
clean_js.py
*.md
+
+!/docs/
+!/docs/**/*.md
+
*.png
*.mdx
\ No newline at end of file
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..549b61b
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2026 Laurence Long
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file
diff --git a/README.md b/README.md
index c6b9940..0c90026 100644
--- a/README.md
+++ b/README.md
@@ -1,367 +1,311 @@
# GitVibe
-A local-first web application that orchestrates multiple AI coding agents to work on code changes in isolated Git worktrees, with PR-first workflow, review capabilities, and deterministic patch-based imports.
-
-## Table of Contents
-
-- [Features](#features)
-- [Architecture](#architecture)
-- [Tech Stack](#tech-stack)
-- [Getting Started](#getting-started)
-- [Usage](#usage)
-- [Project Structure](#project-structure)
-- [API Endpoints](#api-endpoints)
-- [Storage](#storage)
-- [Workspace Locking](#workspace-locking-mechanism)
-- [Project Concurrency](#project-concurrency-limits)
-- [Agent Adapters](#agent-adapters)
-- [Development](#development)
-- [Troubleshooting](#troubleshooting)
-- [License](#license)
-
-## Features
-
-### Core Features
-- **Project Management**: Register and manage source Git repositories with relay repository support
-- **Target Repos**: Configure destination repositories for importing patches
-- **WorkItems**: Create work items that own persistent worktree workspaces for code changes
-- **Pull Requests**: First-class PR model with merge gates, conflict detection, and review
-- **Agent Integration**: Trigger multiple AI coding agents (OpenCode, ClaudeCode) to modify code in serialized runs
-- **Workspace Locking**: Ensures only one agent run per WorkItem at a time
-- **Auto-Commit**: Backend automatically commits changes after each agent run
-- **Diff Viewing**: View code changes with inline diff viewer
-- **Review System**: Add review threads and comments to PRs with severity levels
-- **Patch Import**: Import changes to target repositories using patch files
-- **Full Audit Trail**: Track all agent runs, commits, and imports
-
-### Advanced Features
-- **Session-based Resume**: Continue conversations across multiple agent runs using session IDs
-- **Multiple Merge Strategies**: Support for merge, squash, and rebase strategies
-- **Patch Export**: Export PR changes as patch files
-- **Model Cache**: Cached list of available models for each agent
-- **Review Addressing**: Trigger agent corrections based on review comments
-- **Base Update**: Update PR base branch and optionally rebase head
-- **Real-time Log Streaming**: View agent logs in real-time with separate stdout/stderr
+
-## Architecture
+
-GitVibe uses a **PR-centric and WorkItem-workspace-centric** model:
+**Orchestrate AI coding agents with confidence**
-### Core Principles
+[](https://opensource.org/licenses/MIT)
+[](https://nodejs.org/)
+[](https://www.typescriptlang.org/)
-1. **Workspaces are owned by WorkItems**, not by PRs
-2. **PRs control review and merge** - they are the gatekeepers for code changes
-3. **Agent runs are serialized** - only one run per WorkItem at a time
-4. **Auto-commit after runs** - produces clean commit history and stable PR diffs
-5. **sessionId is required** - enables resume functionality with conversation continuity
+
-### System Architecture
+---
-```
-┌─────────────────────────────────────────────────────────────────────────┐
-│ GitVibe System │
-├─────────────────────────────────────────────────────────────────────────┤
-│ │
-│ ┌──────────────┐ ┌──────────────┐ ┌──────────────┐ │
-│ │ Frontend │◄──►│ Backend │◄──►│ Database │ │
-│ │ (React) │ │ (Fastify) │ │ (SQLite) │ │
-│ └──────────────┘ └──────────────┘ └──────────────┘ │
-│ │ │ │ │
-│ │ ▼ │ │
-│ │ ┌──────────────┐ │ │
-│ │ │ Agent Service │ │ │
-│ │ └──────────────┘ │ │
-│ │ │ │ │
-│ │ ▼ │ │
-│ │ ┌──────────────┐ │ │
-│ │ │ Agent │ │ │
-│ │ │ Adapters │ │ │
-│ │ │ (OpenCode, │ │ │
-│ │ │ ClaudeCode) │ │ │
-│ │ └──────────────┘ │ │
-│ │ │ │ │
-│ │ ▼ │ │
-│ │ ┌──────────────┐ │ │
-│ └─────────────►│ Git │ │ │
-│ │ Service │ │ │
-│ └──────────────┘ │ │
-│ │ │ │
-│ ▼ │ │
-│ ┌──────────────┐ │ │
-│ │ Relay Repo │ │ │
-│ │ + Worktrees │ │ │
-│ └──────────────┘ │ │
-│ │ │ │
-│ ▼ │ │
-│ ┌──────────────┐ │ │
-│ │ Source Repo │ │ │
-│ └──────────────┘ │ │
-└─────────────────────────────────────────────────────────────────────────┘
-```
+## What is GitVibe?
-### Data Flow
+GitVibe is a **local-first web application** that orchestrates multiple AI coding agents to work on code changes in isolated Git worktrees. It provides a PR-first workflow with review capabilities and deterministic patch-based imports.
-1. **User** creates WorkItem in UI
-2. **Backend** initializes workspace (git worktree + branch)
-3. **User** triggers agent run with prompt
-4. **Backend** spawns agent in worktree workspace
-5. **Agent** edits files in worktree
-6. **Backend** auto-commits changes after run
-7. **PR** is created/updated with new diff
-8. **User** reviews PR and optionally adds comments
-9. **User** merges PR (or imports to target repo)
+**Perfect for teams and developers who want to:**
+- Run AI agents safely in isolated environments
+- Review code changes before merging
+- Track all agent runs with full audit trails
+- Import changes to multiple target repositories
-## Tech Stack
+---
-### Backend
+## Quick Start
-- **Node.js 20+** + TypeScript
-- **Fastify** web framework
-- **SQLite** database with Drizzle ORM
-- **Git CLI** integration via child_process
-- **Agent adapter system** (OpenCode, ClaudeCode)
-- **Zod** for runtime validation
-- **Pino** for logging
+Get GitVibe running in under 5 minutes:
-### Frontend
+```bash
+# 1. Clone and install
+git clone
+cd git-vibe
+npm run install:all
-- **React 18** + TypeScript
-- **Vite** build tool
-- **TanStack Query** for data fetching and caching
-- **TanStack Router** for routing
-- **Tailwind CSS** for styling
-- **React Hook Form** for form management
-- **Lucide React** for icons
-- **React Syntax Highlighter** for code display
+# 2. Setup database
+npm run db:migrate
-### Shared
+# 3. Start development servers
+npm run dev
+```
-- **TypeScript** types and Zod schemas
-- Shared between backend and frontend packages
+That's it! 🎉
-## Getting Started
+- **Backend API**: http://127.0.0.1:11031
+- **Frontend UI**: http://localhost:11990
-### Prerequisites
+---
-- **Node.js >= 20**
-- **npm >= 10**
-- **Git** (must be available in PATH)
-- **AI Agent CLI** (OpenCode or Claude Code) - see agent configuration below
+## Key Features
-### Installation
+### 🚀 Core Capabilities
-1. Clone the repository:
+- **Project Management** - Register and manage source Git repositories
+- **WorkItems** - Create persistent workspaces for code changes
+- **Pull Requests** - First-class PR model with merge gates and conflict detection
+- **Agent Integration** - Trigger OpenCode, ClaudeCode, or custom agents
+- **Workspace Locking** - Ensures only one agent run per WorkItem at a time
+- **Auto-Commit** - Backend automatically commits changes after each agent run
+- **Diff Viewing** - View code changes with inline diff viewer
+- **Review System** - Add review threads with severity levels (info/warning/error)
+- **Patch Import** - Import changes to target repositories using patch files
+- **Full Audit Trail** - Track all agent runs, commits, and imports
-```bash
-git clone
-cd git-vibe
-```
+### 🎯 Advanced Features
-2. Install dependencies for all packages:
+- **Session-based Resume** - Continue conversations across multiple agent runs
+- **Multiple Merge Strategies** - Support for merge, squash, and rebase
+- **Real-time Log Streaming** - View agent logs in real-time with separate stdout/stderr
+- **Review Addressing** - Trigger agent corrections based on review comments
-```bash
-npm run install:all
-```
+---
-3. Run database migrations:
+## Tech Stack
-```bash
-npm run db:migrate
-```
+
-### Development
+**Backend** | **Frontend** | **Database**
+---|---|---
+Node.js 20+ | React 18 | SQLite
+Fastify | Vite | Drizzle ORM
+TypeScript | TanStack Query | Git CLI
+Pino | TanStack Router | Zod
-Start both backend and frontend in development mode:
+
-```bash
-npm run dev
-```
+---
-This will start:
+## How It Works
-- **Backend API server** at `http://127.0.0.1:11031`
-- **Frontend UI** at `http://localhost:11990`
+### Core Principles
-### Environment Variables
+1. **Workspaces are owned by WorkItems** - Not by PRs
+2. **PRs control review and merge** - Gatekeepers for code changes
+3. **Agent runs are serialized** - Only one run per WorkItem at a time
+4. **Auto-commit after runs** - Clean commit history and stable PR diffs
+5. **Session ID required** - Enables resume functionality with conversation continuity
-Create a `.env` file in the `backend` directory:
+### Workflow Overview
-```env
-PORT=11031
-HOST=127.0.0.1
-DATABASE_URL=./data/db.sqlite
-STORAGE_BASE_DIR=/tmp/git-vibe
-LOG_LEVEL=info
```
-
-**Note**: Agent executable paths are configured per-project in the UI, not via environment variables. See "Agent Configuration" section below.
-
-### Production Build
-
-Build all packages:
-
-```bash
-npm run build
+┌─────────────┐ ┌─────────────┐ ┌─────────────┐
+│ Create │───►│ Trigger │───►│ Review │
+│ WorkItem │ │ Agent Run │ │ PR │
+└─────────────┘ └─────────────┘ └─────────────┘
+ │ │ │
+ ▼ ▼ ▼
+┌─────────────┐ ┌─────────────┐ ┌─────────────┐
+│ Initialize │ │ Auto- │ │ Merge / │
+│ Workspace │ │ Commit │ │ Import │
+└─────────────┘ └─────────────┘ └─────────────┘
```
-Or build individual packages:
-
-```bash
-npm run build:backend
-npm run build:frontend
-npm run build:shared
-```
+---
-## Usage
+## Usage Guide
### 1. Register a Project
-Navigate to **Projects** and add a source Git repository:
+Navigate to **Projects** and add your source Git repository:
- **Name**: My Project
- **Source Repo Path**: `/path/to/repo`
-- **Source Repo URL**: https://github.com/user/repo (optional, for reference)
-- **Default Branch**: `main` (or your default branch)
+- **Source Repo URL**: https://github.com/user/repo (optional)
+- **Default Branch**: `main`
- **Default Agent**: Choose `opencode` or `claudecode`
-- **Agent Executable Path**: Path to the agent CLI (e.g., `/usr/local/bin/opencode` or `/usr/local/bin/claude`)
-- **Agent Parameters**: JSON configuration for model selection, arguments, etc.
-- **Max Concurrency**: Maximum concurrent agent runs across all WorkItems in the project (default: 3)
-
-### 2. Register a Target Repo
-
-Navigate to **Target Repos** and add a destination repository:
+- **Agent Executable Path**: Path to agent CLI (e.g., `/usr/local/bin/opencode`)
+- **Max Concurrency**: Max concurrent agent runs (default: 3)
-- **Name**: My Target Repo
-- **Repo Path**: `/path/to/target/repo`
-- **Default Branch**: `main` (or target's default branch)
-
-### 3. Create a WorkItem
+### 2. Create a WorkItem
Navigate to **WorkItems** and create a new work item:
-- **Select Project**: Choose the project this work item belongs to
+- **Select Project**: Choose the project
- **Title**: Feature description
- **Body**: Detailed description (optional)
- **Type**: Task type (`issue` or `feature-request`)
-This creates a WorkItem that will own a persistent workspace.
-
-### 4. Initialize Workspace
+This creates a WorkItem with a persistent workspace.
-The workspace is automatically initialized on the first agent run, or you can explicitly initialize it:
+### 3. Open a Pull Request
-- WorkItem creates a git worktree on a dedicated branch
-- Branch name format: `wi/`
-- Worktree path: `/worktrees//`
+In the WorkItem detail view, open a PR:
-### 5. Open a Pull Request
+- **Base branch**: Branch to merge into (e.g., `main`)
+- PR is automatically created with 1:1 relationship to the WorkItem
-Navigate to the WorkItem and open a PR:
-
-- **Base branch**: The branch to merge into (e.g., `main`)
-- The PR is automatically created with 1:1 relationship to the WorkItem
-- PR tracks base SHA, head SHA, and merge status
-
-### 6. Configure Agent (Per Project)
-
-Each project can be configured with agent settings:
-
-- **Default Agent**: Choose `opencode` or `claudecode`
-- **Agent Executable Path**: Path to the agent CLI (e.g., `/usr/local/bin/opencode` or `/usr/local/bin/claude`)
-- **Agent Parameters**: JSON configuration for model selection, arguments, etc.
-- **Max Concurrency**: Maximum concurrent agent runs across all WorkItems in the project (default: 3)
-
-### 7. Trigger Agent Runs
+### 4. Trigger Agent Runs
In the WorkItem detail view, trigger agent runs:
-- **Agent runs** use the project's default agent configuration
- **Prompt**: Your task description
- **Session ID**: Auto-generated as `wi-` for conversation continuity
-- The system automatically initializes the workspace if needed
-
-**Workspace Locking**: Only one agent run can be active per WorkItem at a time. If a run is in progress, new runs will be rejected with an error.
-
-**Project Concurrency**: The project's `max_agent_concurrency` setting limits how many agent runs can execute simultaneously across all WorkItems in that project.
-
-**Auto-Commit**: After each agent run completes successfully, the backend automatically stages and commits any changes made by the agent. This produces a clean commit history and stable PR diffs.
+- Workspace is automatically initialized if needed
-**Session Continuity**: Agent runs use WorkItem-scoped session IDs (`wi-`) by default, enabling resume functionality where agents can continue previous conversations.
+**Key Features:**
+- ✅ Only one agent run per WorkItem at a time
+- ✅ Auto-commit after each successful run
+- ✅ Session continuity across runs
-### 8. Review Pull Request
+### 5. Review & Merge
View the PR to review changes:
- **Overview**: PR details, status, and mergeability
- **Diff**: Code changes between base and head
-- **Commits**: Commit history for the PR, grouped by agent runs
-- **Files Changed**: List of files modified in the PR
-- **Checks**: Agent run history and status
+- **Commits**: Commit history grouped by agent runs
- **Reviews**: Review threads and comments
-### 9. Add Review Comments
+**Add Review Comments:**
+- Select file and line number
+- Choose severity (info/warning/error)
+- Trigger agent to address comments
-Create review threads on PRs:
+**Merge PR:**
+- Check mergeability (no conflicts, no running agents)
+- Choose merge strategy (`merge`, `squash`, or `rebase`)
+- Merge into base branch
-- **Severity**: Choose `info`, `warning`, or `error`
-- **Anchor**: Select file and line number
-- **Comments**: Add multiple comments to a thread
-- **Address with Agent**: Trigger agent to address review comments
-- **Resolve/Unresolve**: Mark threads as resolved or open
+### 6. Import to Target Repo (Optional)
-### 10. Merge PR
+Export PR changes and import to target repository:
-When satisfied with changes, merge the PR:
+1. Navigate to PR detail view
+2. Click "Export Patch" to generate patch file
+3. Select target repo and click Import
-- **Check mergeability**: No conflicts, no running agent runs
-- **Choose merge strategy**: `merge`, `squash`, or `rebase`
-- **Merge into base branch**: Execute merge operation
+GitVibe will:
+- Generate patch from PR diff
+- Apply patch using `git apply --3way`
+- Create commit with PR metadata
+- Record import in history
-**Merge Gates**:
-- PR must be in `open` status
-- No agent runs can be running for the WorkItem
-- Workspace lock must be free
-- No merge conflicts
+---
-### 11. Update Base / Rebase
+## Configuration
-Update PR base to latest base branch:
+### Environment Variables
-- **Update Base**: Refresh base SHA to latest base branch
-- **Rebase**: Optionally rebase head branch onto new base
-- Useful when base branch has moved forward
+Create a `.env` file in the `backend` directory:
-### 12. Export Patch
+```env
+PORT=11031
+HOST=127.0.0.1
+DATABASE_URL=./data/db.sqlite
+STORAGE_BASE_DIR=/tmp/git-vibe
+LOG_LEVEL=info
+```
-Export PR changes as a patch file:
+### Storage Location
-- Navigate to PR detail view
-- Click "Export Patch"
-- Patch is generated from `base_sha..head_sha`
-- Save patch file for manual application
+All data is stored in:
+- **Linux/Mac**: `/tmp/git-vibe/`
+- **Windows**: `%TEMP%\git-vibe\`
-### 13. Import to Target Repo
+To use custom storage, set `STORAGE_BASE_DIR` in `.env`.
-Optionally import changes to your target repository:
+---
-- Navigate to PR detail view
-- Select target repo
-- Click Import
+## Agent Adapters
-GitVibe will:
+GitVibe supports multiple AI coding agents:
-1. Generate a patch from PR diff
-2. Apply patch to target repo using `git apply --3way`
-3. Create a commit with PR metadata
-4. Record import in history
+### OpenCode
+- **Key**: `opencode`
+- **Executable**: `opencode` CLI
+- **Features**: Full agent execution, model selection, session management
-### 14. Clean Up
+### ClaudeCode
+- **Key**: `claudecode`
+- **Executable**: `claude` CLI
+- **Features**: Full agent execution with `--session-id` support
-When done, delete the WorkItem to:
+### Adding New Agents
-- Remove the worktree
-- Delete the PR
-- Delete all associated records
+Create a new adapter class extending `AgentAdapter` and implement:
+- `validate()` - Check executable availability
+- `run()` - Execute agent with prompt
+- `correctWithReviewComments()` - Resume/correct with review feedback
+- `getModels()` - List available models
+- `cancel()` - Cancel running process
+- `getStatus()` - Check run status
+
+---
+
+
+📖 Advanced Documentation
+
+## Architecture
+
+### System Architecture
+
+```
+┌─────────────────────────────────────────────────────────────────────────┐
+│ GitVibe System │
+├─────────────────────────────────────────────────────────────────────────┤
+│ │
+│ ┌──────────────┐ ┌──────────────┐ ┌──────────────┐ │
+│ │ Frontend │◄──►│ Backend │◄──►│ Database │ │
+│ │ (React) │ │ (Fastify) │ │ (SQLite) │ │
+│ └──────────────┘ └──────────────┘ └──────────────┘ │
+│ │ │ │ │
+│ │ ▼ │ │
+│ │ ┌──────────────┐ │ │
+│ │ │ Agent Service │ │ │
+│ │ └──────────────┘ │ │
+│ │ │ │ │
+│ │ ▼ │ │
+│ │ ┌──────────────┐ │ │
+│ │ │ Agent │ │ │
+│ │ │ Adapters │ │ │
+│ │ │ (OpenCode, │ │ │
+│ │ │ ClaudeCode) │ │ │
+│ │ └──────────────┘ │ │
+│ │ │ │ │
+│ │ ▼ │ │
+│ │ ┌──────────────┐ │ │
+│ └─────────────►│ Git │ │ │
+│ │ Service │ │ │
+│ └──────────────┘ │ │
+│ │ │ │
+│ ▼ │ │
+│ ┌──────────────┐ │ │
+│ │ Relay Repo │ │ │
+│ │ + Worktrees │ │ │
+│ └──────────────┘ │ │
+│ │ │ │
+│ ▼ │ │
+│ ┌──────────────┐ │ │
+│ │ Source Repo │ │ │
+│ └──────────────┘ │ │
+└─────────────────────────────────────────────────────────────────────────┘
+```
+
+### Data Flow
+
+1. **User** creates WorkItem in UI
+2. **Backend** initializes workspace (git worktree + branch)
+3. **User** triggers agent run with prompt
+4. **Backend** spawns agent in worktree workspace
+5. **Agent** edits files in worktree
+6. **Backend** auto-commits changes after run
+7. **PR** is created/updated with new diff
+8. **User** reviews PR and optionally adds comments
+9. **User** merges PR (or imports to target repo)
## Project Structure
@@ -370,181 +314,27 @@ git-vibe/
├── backend/ # Fastify API + SQLite + Git integration
│ ├── src/
│ │ ├── routes/ # API route handlers
-│ │ │ ├── projects.ts
-│ │ │ ├── targetRepos.ts
-│ │ │ ├── pullRequests.ts
-│ │ │ ├── agentRuns.ts
-│ │ │ ├── reviews.ts
-│ │ │ └── workitems.ts
│ │ ├── services/ # Business logic
-│ │ │ ├── AgentService.ts
-│ │ │ ├── AgentAdapter.ts
-│ │ │ ├── OpenCodeAgentAdapter.ts
-│ │ │ ├── ClaudeCodeAgentAdapter.ts
-│ │ │ ├── GitService.ts
-│ │ │ ├── GitWorktreeService.ts
-│ │ │ ├── GitCommitService.ts
-│ │ │ ├── GitFileService.ts
-│ │ │ ├── GitRelayService.ts
-│ │ │ ├── PRService.ts
-│ │ │ ├── WorkspaceService.ts
-│ │ │ ├── PromptBuilder.ts
-│ │ │ └── ModelsCache.ts
│ │ ├── repositories/ # Database access layer
-│ │ │ ├── ProjectsRepository.ts
-│ │ │ ├── TargetReposRepository.ts
-│ │ │ ├── WorkItemsRepository.ts
-│ │ │ ├── PullRequestsRepository.ts
-│ │ │ ├── AgentRunsRepository.ts
-│ │ │ ├── ReviewThreadsRepository.ts
-│ │ │ └── ReviewCommentsRepository.ts
-│ │ ├── mappers/ # Database to DTO mappers
-│ │ │ ├── projects.ts
-│ │ │ ├── targetRepos.ts
-│ │ │ ├── workItems.ts
-│ │ │ ├── pullRequests.ts
-│ │ │ ├── agentRuns.ts
-│ │ │ └── reviews.ts
-│ │ ├── models/ # Drizzle schema
-│ │ │ └── schema.ts
-│ │ ├── middleware/ # Fastify middleware
-│ │ │ └── setup.ts
-│ │ ├── db/ # Database client and migrations
-│ │ │ ├── client.ts
-│ │ │ ├── migrate-cli.ts
-│ │ │ └── migrations.ts
-│ │ ├── config/ # Configuration
-│ │ │ └── storage.ts
-│ │ ├── types/ # TypeScript types
-│ │ │ └── models.ts
-│ │ ├── utils/ # Utilities
-│ │ │ └── storage.ts
-│ │ └── server.ts # Server entry point
-│ ├── drizzle/ # Database migrations
-│ ├── package.json
-│ ├── tsconfig.json
-│ └── vitest.config.mjs
+│ │ ├── mappers/ # Database to DTO mappers
+│ │ ├── models/ # Drizzle schema
+│ │ └── db/ # Database client and migrations
├── frontend/ # React + Vite application
│ ├── src/
│ │ ├── components/ # UI components
-│ │ │ ├── ui/ # Base UI components
-│ │ │ │ ├── Button.tsx
-│ │ │ │ ├── Input.tsx
-│ │ │ │ ├── Modal.tsx
-│ │ │ │ ├── Tabs.tsx
-│ │ │ │ ├── Select.tsx
-│ │ │ │ ├── Textarea.tsx
-│ │ │ │ ├── Pagination.tsx
-│ │ │ │ ├── LogPane.tsx
-│ │ │ │ ├── StatusBadge.tsx
-│ │ │ │ ├── EmptyState.tsx
-│ │ │ │ └── Skeleton.tsx
-│ │ │ ├── project/ # Project-related components
-│ │ │ │ ├── ProjectHeader.tsx
-│ │ │ │ ├── ProjectShell.tsx
-│ │ │ │ ├── OverviewTab.tsx
-│ │ │ │ ├── CodeTab.tsx
-│ │ │ │ ├── PullRequestsTab.tsx
-│ │ │ │ ├── WorkItemsTab.tsx
-│ │ │ │ ├── SettingsTab.tsx
-│ │ │ │ ├── ActionsTab.tsx
-│ │ │ │ └── TabNavigation.tsx
-│ │ │ ├── pr/ # PR-related components
-│ │ │ │ ├── PRDetail.tsx
-│ │ │ │ ├── OverviewTab.tsx
-│ │ │ │ ├── DiffReviewTab.tsx
-│ │ │ │ ├── CommitsTab.tsx
-│ │ │ │ ├── FilesChangedTab.tsx
-│ │ │ │ ├── ChecksTab.tsx
-│ │ │ │ ├── ConversationTab.tsx
-│ │ │ │ └── AgentRunsTab.tsx
-│ │ │ ├── workitem/ # WorkItem-related components
-│ │ │ │ ├── WorkItemDetail.tsx
-│ │ │ │ ├── DiscussionTab.tsx
-│ │ │ │ ├── LogDetailTab.tsx
-│ │ │ │ ├── PRStatusTab.tsx
-│ │ │ │ ├── TaskManagementTab.tsx
-│ │ │ │ ├── AgentConfigTab.tsx
-│ │ │ │ └── CreateWorkItemModal.tsx
-│ │ │ ├── review/ # Review-related components
-│ │ │ │ ├── ThreadComposer.tsx
-│ │ │ │ ├── ThreadActions.tsx
-│ │ │ │ ├── CommentComposer.tsx
-│ │ │ │ └── ThreadStatusBadge.tsx
-│ │ │ ├── diff/ # Diff viewer
-│ │ │ │ └── DiffViewer.tsx
-│ │ │ ├── agent/ # Agent-related components
-│ │ │ │ └── AgentRunConfigForm.tsx
-│ │ │ ├── worktree/ # Worktree status
-│ │ │ │ ├── WorktreeStatus.tsx
-│ │ │ │ └── WorktreeStatusBadge.tsx
-│ │ │ └── shared/ # Shared components
-│ │ │ ├── Layout.tsx
-│ │ │ ├── ErrorBoundary.tsx
-│ │ │ └── Toast.tsx
│ │ ├── routes/ # TanStack Router config
-│ │ │ ├── __root.tsx
-│ │ │ ├── index.tsx
-│ │ │ ├── projects/
-│ │ │ │ ├── index.tsx
-│ │ │ │ ├── $projectName.tsx
-│ │ │ │ ├── $projectName.index.tsx
-│ │ │ │ ├── $projectName.code.tsx
-│ │ │ │ ├── $projectName.actions.tsx
-│ │ │ │ ├── $projectName.pullrequests.tsx
-│ │ │ │ ├── $projectName.workitems.tsx
-│ │ │ │ └── $projectName.settings.tsx
-│ │ │ └── target-repos/
-│ │ │ ├── index.tsx
-│ │ │ └── $id.tsx
│ │ ├── hooks/ # React hooks
-│ │ │ ├── useAgentRunPolling.ts
-│ │ │ ├── useBranchSelector.ts
-│ │ │ ├── useDiffView.ts
-│ │ │ ├── useKeyboardShortcuts.ts
-│ │ │ ├── useModels.ts
-│ │ │ ├── usePR.ts
-│ │ │ ├── useReviewThreads.ts
-│ │ │ ├── useStreamingLogs.ts
-│ │ │ ├── useWorkItem.ts
-│ │ │ ├── useWorkItemRefresh.ts
-│ │ │ └── useWorktreeManagement.ts
-│ │ ├── lib/ # API client and utilities
-│ │ │ ├── api.ts
-│ │ │ ├── datetime.ts
-│ │ │ ├── utils.ts
-│ │ │ └── validation.ts
-│ │ ├── types/ # TypeScript types
-│ │ │ └── index.ts
-│ │ ├── index.css
-│ │ └── main.tsx
-│ ├── package.json
-│ ├── tsconfig.json
-│ ├── vite.config.ts
-│ ├── tailwind.config.js
-│ └── postcss.config.js
+│ │ └── lib/ # API client and utilities
├── shared/ # Shared types and utilities
-│ ├── src/
-│ │ ├── types/ # Common types
-│ │ │ ├── models.ts
-│ │ │ ├── requests.ts
-│ │ │ ├── responses.ts
-│ │ │ └── common.ts
-│ │ ├── codec/ # Custom codecs
-│ │ │ └── datetime.ts
-│ │ └── index.ts
-│ ├── package.json
-│ └── tsconfig.json
-├── package.json # Root package.json with workspace scripts
-├── PLAN.md # Architecture and design document
-└── README.md # This file
+│ └── src/
+│ └── types/ # Common types
+└── package.json # Root package.json with workspace scripts
```
## API Endpoints
### Projects
-
-- `GET /api/projects` - List all projects with pagination
+- `GET /api/projects` - List all projects
- `POST /api/projects` - Create a project
- `GET /api/projects/:id` - Get project details
- `PATCH /api/projects/:id` - Update project settings
@@ -555,46 +345,30 @@ git-vibe/
- `GET /api/projects/:id/files/content` - Get file content
- `GET /api/models` - List available agent models
- `POST /api/models/refresh` - Refresh model cache
-- `GET /api/branches` - List branches by repo path
-
-### Target Repos
-
-- `GET /api/target-repos` - List all target repos
-- `POST /api/target-repos` - Create a target repo
-- `GET /api/target-repos/:id` - Get target repo details
### WorkItems
-
-- `GET /api/workitems` - List work items with optional project filter and pagination
+- `GET /api/workitems` - List work items
- `POST /api/projects/:projectId/work-items` - Create a work item
- `GET /api/workitems/:id` - Get work item details
- `PATCH /api/workitems/:id` - Update work item
- `DELETE /api/workitems/:id` - Delete work item
-- `POST /api/work-items/:id/init-workspace` - Initialize workspace (optional)
- `POST /api/workitems/:id/start` - Start agent run
- `POST /api/workitems/:id/resume` - Resume task with same session_id
- `GET /api/workitems/:id/tasks` - List all runs for work item
- `POST /api/workitems/:id/tasks/:taskId/cancel` - Cancel running task
-- `POST /api/workitems/:id/tasks/:taskId/restart` - Restart task with same prompt
-- `GET /api/workitems/:id/tasks/:taskId/status` - Get task status
-- `GET /api/workitems/:id/prs` - Get PRs for work item
- `POST /api/workitems/:id/create-pr` - Create PR from work item
### Pull Requests
-
-- `GET /api/pull-requests` - List PRs (with optional project filter and pagination)
+- `GET /api/pull-requests` - List PRs
- `GET /api/pull-requests/:id` - Get PR details
- `GET /api/pull-requests/:id/diff` - Get PR diff
- `GET /api/pull-requests/:id/commits` - Get PR commits
-- `GET /api/pull-requests/:id/commits-with-tasks` - Get PR commits grouped by tasks
-- `GET /api/pull-requests/:id/statistics` - Get PR statistics
- `POST /api/pull-requests/:id/merge` - Merge PR
- `POST /api/pull-requests/:id/close` - Close PR without merge
- `POST /api/pull-requests/:id/update-base` - Update base branch and optionally rebase
- `GET /api/pull-requests/:id/patch` - Export patch
### Agent Runs
-
- `GET /api/agent-runs/:id` - Get run status and logs
- `POST /api/agent-runs/:id/cancel` - Cancel running agent
- `GET /api/agent-runs/:id/stdout` - Get stdout log
@@ -602,207 +376,56 @@ git-vibe/
- `GET /api/agent-runs/:id/logs` - Get both stdout and stderr logs
### Reviews
-
- `GET /api/pull-requests/:id/reviews/threads` - List review threads
- `POST /api/pull-requests/:id/reviews/threads` - Create thread
-- `GET /api/pull-requests/:id/reviews/threads/:threadId` - Get thread details
- `POST /api/pull-requests/:id/reviews/threads/:threadId/resolve` - Resolve thread
-- `POST /api/pull-requests/:id/reviews/threads/:threadId/unresolve` - Unresolve thread
- `POST /api/pull-requests/:id/reviews/threads/:threadId/comments` - Add comment
- `POST /api/pull-requests/:id/reviews/threads/:threadId/address` - Address with agent
-- `POST /api/pull-requests/:id/reviews/threads/:threadId/resume` - Resume from thread
-
-## Storage
-
-All data is stored in the system temp directory:
-
-- **Linux/Mac**: `/tmp/git-vibe/`
-- **Windows**: `%TEMP%\git-vibe\`
-
-Directory structure:
-
-```
-git-vibe/
-├── data/
-│ └── db.sqlite # SQLite database
-├── logs/ # Agent run logs
-│ ├── agent-run-.log
-│ ├── agent-run--stdout.log
-│ └── agent-run--stderr.log
-└── worktrees/ # Git worktrees for WorkItems
- └── / # WorkItem workspace
-```
-### Custom Storage Location
+## Workspace Locking
-To use a custom storage location, set the `STORAGE_BASE_DIR` environment variable in `backend/.env`:
-
-```env
-STORAGE_BASE_DIR=/custom/path/to/git-vibe-data
-```
-
-## Workspace Locking Mechanism
-
-GitVibe implements workspace locking at the WorkItem level to ensure serialized agent runs:
+GitVibe implements workspace locking at the WorkItem level:
- **Lock Fields**: `lock_owner_run_id` and `lock_expires_at` on WorkItem table
-- **Acquisition**: Before starting an agent run, the system acquires a lock on the WorkItem
-- **TTL**: Locks have a time-to-live (TTL, default: 6 hours) for crash recovery
-- **Release**: Lock is released after agent run finalization (success/failure/cancel)
-- **Conflict**: If a lock is already held and not expired, new runs are rejected with an error
-
-This prevents concurrent agent runs from corrupting the workspace state.
-
-## Project Concurrency Limits
-
-In addition to WorkItem-level locking, projects have configurable concurrency limits:
-
-- **Per-Project Limit**: `max_agent_concurrency` setting (default: 3)
-- **Enforcement**: Limits concurrent agent runs across all WorkItems in a project
-- **Purpose**: Prevents resource exhaustion when multiple WorkItems are active
-- **Tracking**: Managed in-memory by `AgentService`
-
-## Agent Adapters
-
-GitVibe supports multiple AI coding agents through an adapter system:
-
-### OpenCode Agent
-- **Key**: `opencode`
-- **Executable**: `opencode` CLI
-- **Features**: Full agent execution, model selection, session management
-
-### ClaudeCode Agent
-- **Key**: `claudecode`
-- **Executable**: `claude` CLI
-- **Features**: Full agent execution with `--session-id` support for conversation continuity
-
-### Adding New Agents
-
-To add a new agent adapter:
-
-1. Create a new adapter class extending `AgentAdapter`
-2. Implement required methods:
- - `validate()`: Check executable availability
- - `run()`: Execute agent with prompt
- - `correctWithReviewComments()`: Resume/correct with review feedback
- - `getModels()`: List available models
- - `cancel()`: Cancel running process
- - `getStatus()`: Check run status
-3. Register the adapter in `AgentService` constructor
-4. Update `AgentType` union type in shared types
+- **Acquisition**: Before starting an agent run, the system acquires a lock
+- **TTL**: Locks have a time-to-live (default: 6 hours) for crash recovery
+- **Release**: Lock is released after agent run finalization
+- **Conflict**: If a lock is already held and not expired, new runs are rejected
## Development
### Running Tests
-Run tests for the backend:
-
```bash
cd backend
npm test
```
-Run tests once:
-
-```bash
-cd backend
-npm run test:run
-```
-
-### Code Style
-
-The project uses:
-- **ESLint** for linting
-- **Prettier** for code formatting
-- **TypeScript** strict mode
-
-Run linting and formatting:
-
-```bash
-npm run lint
-npm run format
-```
-
-Lint/format individual packages:
-
-```bash
-npm run lint:backend
-npm run format:backend
-# etc.
-```
-
### Database Migrations
-Generate new migrations:
-
```bash
cd backend
-npm run db:generate
-```
-
-Run migrations:
-
-```bash
-npm run db:migrate
+npm run db:generate # Generate new migrations
+npm run db:migrate # Run migrations
+npm run db:studio # View database with Drizzle Studio
```
-View database with Drizzle Studio:
+### Code Style
```bash
-cd backend
-npm run db:studio
+npm run lint # ESLint
+npm run format # Prettier
```
-### API Development
-
-When adding new API endpoints:
-
-1. Add route handler in `backend/src/routes/`
-2. Add repository methods in `backend/src/repositories/`
-3. Add service methods in `backend/src/services/`
-4. Add DTOs in `shared/src/types/`
-5. Update frontend API client in `frontend/src/lib/api.ts`
-
-### Frontend Development
-
-When adding new UI components:
-
-1. Create component in `frontend/src/components/`
-2. Add hook in `frontend/src/hooks/` if needed
-3. Add route in `frontend/src/routes/`
-4. Update API client if needed
+
-## Architecture Notes
-
-### Session Management
-- Agent runs use WorkItem-scoped session IDs by default: `wi-`
-- This enables conversation continuity across multiple runs
-- Resume functionality creates new AgentRun records but reuses the same session_id
-
-### Auto-Commit Behavior
-- Only successful agent runs trigger auto-commit
-- Failed runs leave workspace unchanged for debugging
-- Commit messages follow format: `AgentRun : `
-
-### Review System
-- Review threads can be created on PRs with file/line anchors
-- Comments can be added to threads
-- Threads can be resolved/unresolved
-- Review comments can trigger agent corrections via `address` endpoint
-
-### Import System
-- Patch-based import strategy (currently only strategy)
-- Generates patch from PR diff (`base_sha..head_sha`)
-- Applies patch to target repository using `git apply --3way`
-- Creates commit with PR metadata
-- Tracks import history with status and logs
+---
## Troubleshooting
### Agent Not Found
If you get "Executable not found" errors:
-
1. Verify that the agent executable is in your PATH
2. Or provide the full path in project settings
3. Check that the executable has execute permissions
@@ -810,7 +433,6 @@ If you get "Executable not found" errors:
### Workspace Lock Issues
If a WorkItem is stuck in locked state:
-
1. Check if an agent run is actually running
2. If not, the lock TTL will expire (default: 6 hours)
3. Or manually release the lock via the database
@@ -818,7 +440,6 @@ If a WorkItem is stuck in locked state:
### Git Worktree Errors
If worktree operations fail:
-
1. Ensure that the relay repository path is correct
2. Check that the repository is a valid Git repo
3. Run `git worktree prune` to clean up stale worktrees
@@ -826,28 +447,20 @@ If worktree operations fail:
### Merge Conflicts
If merge fails due to conflicts:
-
1. Update the PR base to the latest base branch
2. Rebase the head branch onto the new base
3. Resolve conflicts manually in the worktree
4. Try merge again
-### Database Issues
-
-If you encounter database issues:
-
-1. Delete the database file: `data/db.sqlite`
-2. Run migrations again: `npm run db:migrate`
-3. Note: This will delete all your data
-
### Port Already in Use
If you get "Port already in use" error:
-
1. Check if another instance is running
2. Or change the PORT in `backend/.env`
3. Default port is 11031
+---
+
## Contributing
Contributions are welcome! Please:
@@ -859,10 +472,12 @@ Contributions are welcome! Please:
5. Ensure all tests pass
6. Submit a pull request
+---
+
## License
MIT
---
-For detailed architecture and design decisions, see [PLAN.md](PLAN.md).
+**For detailed architecture and design decisions, see [PLAN.md](docs/PLAN.md).**
diff --git a/backend/drizzle/0001_optimized_workflow_refactor.sql b/backend/drizzle/0001_optimized_workflow_refactor.sql
new file mode 100644
index 0000000..ca60498
--- /dev/null
+++ b/backend/drizzle/0001_optimized_workflow_refactor.sql
@@ -0,0 +1,314 @@
+-- Migration: Optimized Workflow Refactor
+-- Implements the optimized workflow design as specified in docs/architecture/optimized_workflow_design.md
+--
+-- Changes:
+-- 1. Adds mirror_repo_path to projects table
+-- 2. Drops unused tables: imports, target_repos
+-- 3. Creates infrastructure tables: workflows, workflow_runs, node_runs, command_execs, event_outbox
+-- 4. Creates tasks table (Domain resource) - separates Task from AgentRun
+-- 5. Creates worktrees table (Op resource) - separates Worktree from work_items fields
+-- 6. Creates git_ops table (Op resource) - new resource type for git operations
+-- 7. Modifies agent_runs table: adds task_id, idempotency_key, makes session_id nullable, adds pid
+-- 8. Adds idempotency_key to pull_requests and work_items tables
+--
+-- Key principles:
+-- - Domain resources (WorkItem, Task, PullRequest) vs Op resources (Worktree, AgentRun, GitOps, CommandExec)
+-- - Task is a Domain resource that orchestrates AgentRun (Op resource)
+-- - Each resource table has idempotency_key for idempotency enforcement
+
+-- Add mirror_repo_path to projects table
+-- Multiple projects with the same source path share the same mirror repo
+CREATE TABLE IF NOT EXISTS "projects_new" (
+ "id" text PRIMARY KEY NOT NULL,
+ "name" text NOT NULL,
+ "source_repo_path" text NOT NULL,
+ "source_repo_url" text,
+ "mirror_repo_path" text NOT NULL DEFAULT '',
+ "relay_repo_path" text NOT NULL,
+ "default_branch" text NOT NULL,
+ "default_agent" text NOT NULL DEFAULT 'opencode',
+ "agent_params" text,
+ "max_agent_concurrency" integer NOT NULL DEFAULT 3,
+ "created_at" integer NOT NULL DEFAULT (unixepoch()),
+ "updated_at" integer NOT NULL DEFAULT (unixepoch())
+);
+
+-- Copy data from old projects to new projects
+INSERT INTO "projects_new" (
+ "id", "name", "source_repo_path", "source_repo_url", "relay_repo_path",
+ "default_branch", "default_agent", "agent_params", "max_agent_concurrency",
+ "created_at", "updated_at"
+)
+SELECT
+ "id", "name", "source_repo_path", "source_repo_url", "relay_repo_path",
+ "default_branch", "default_agent", "agent_params", "max_agent_concurrency",
+ "created_at", "updated_at"
+FROM "projects";
+
+-- Drop old projects table
+DROP TABLE IF EXISTS "projects";
+
+-- Rename new table to projects
+ALTER TABLE "projects_new" RENAME TO "projects";
+
+-- Recreate indexes for projects
+CREATE UNIQUE INDEX IF NOT EXISTS "projects_name_unique" ON "projects" ("name");
+
+-- Drop unused tables that are not in the final schema
+DROP TABLE IF EXISTS "imports";
+DROP TABLE IF EXISTS "target_repos";
+
+-- Create infrastructure tables (must be created before tables that reference them)
+
+-- Create workflows table
+CREATE TABLE IF NOT EXISTS "workflows" (
+ "id" text PRIMARY KEY NOT NULL,
+ "project_id" text NOT NULL,
+ "name" text NOT NULL,
+ "definition" text NOT NULL,
+ "is_default" integer NOT NULL DEFAULT 0,
+ "version" integer NOT NULL DEFAULT 1,
+ "created_at" integer NOT NULL DEFAULT (unixepoch()),
+ "updated_at" integer NOT NULL DEFAULT (unixepoch()),
+ FOREIGN KEY ("project_id") REFERENCES "projects"("id") ON DELETE CASCADE
+);
+
+CREATE INDEX IF NOT EXISTS "idx_workflows_project_id" ON "workflows" ("project_id");
+CREATE INDEX IF NOT EXISTS "idx_workflows_project_name_unique" ON "workflows" ("project_id", "name");
+
+-- Create workflow_runs table
+CREATE TABLE IF NOT EXISTS "workflow_runs" (
+ "id" text PRIMARY KEY NOT NULL,
+ "workflow_id" text NOT NULL,
+ "work_item_id" text NOT NULL,
+ "status" text NOT NULL CHECK("status" IN ('pending', 'running', 'succeeded', 'failed', 'blocked', 'skipped')) DEFAULT 'pending',
+ "current_step_id" text,
+ "started_at" integer,
+ "finished_at" integer,
+ "created_at" integer NOT NULL DEFAULT (unixepoch()),
+ FOREIGN KEY ("workflow_id") REFERENCES "workflows"("id") ON DELETE CASCADE,
+ FOREIGN KEY ("work_item_id") REFERENCES "work_items"("id") ON DELETE CASCADE
+);
+
+CREATE INDEX IF NOT EXISTS "idx_workflow_runs_workflow_id" ON "workflow_runs" ("workflow_id");
+CREATE INDEX IF NOT EXISTS "idx_workflow_runs_work_item_id" ON "workflow_runs" ("work_item_id");
+CREATE INDEX IF NOT EXISTS "idx_workflow_runs_status" ON "workflow_runs" ("status");
+
+-- Create node_runs table (must be created before tasks, worktrees, git_ops reference it)
+CREATE TABLE IF NOT EXISTS "node_runs" (
+ "id" text PRIMARY KEY NOT NULL,
+ "run_id" text NOT NULL,
+ "workflow_run_id" text NOT NULL,
+ "node_id" text NOT NULL,
+ "resource_type" text NOT NULL CHECK("resource_type" IN ('WorkItem', 'Worktree', 'Task', 'AgentRun', 'PullRequest', 'GitOps', 'CommandExec')),
+ "subject_kind" text NOT NULL CHECK("subject_kind" IN ('workitem', 'task', 'pr_request', 'worktree')),
+ "subject_id" text NOT NULL,
+ "subject_version_at_start" integer NOT NULL,
+ "status" text NOT NULL CHECK("status" IN ('pending', 'running', 'succeeded', 'failed', 'canceled', 'blocked')) DEFAULT 'pending',
+ "attempt" integer NOT NULL DEFAULT 1,
+ "idempotency_key" text,
+ "input" text NOT NULL,
+ "output" text NOT NULL,
+ "error" text,
+ "started_at" integer,
+ "finished_at" integer,
+ "created_at" integer NOT NULL DEFAULT (unixepoch()),
+ FOREIGN KEY ("workflow_run_id") REFERENCES "workflow_runs"("id") ON DELETE CASCADE
+);
+
+CREATE INDEX IF NOT EXISTS "idx_node_runs_workflow_run_id" ON "node_runs" ("workflow_run_id");
+CREATE INDEX IF NOT EXISTS "idx_node_runs_node_id" ON "node_runs" ("node_id");
+CREATE INDEX IF NOT EXISTS "idx_node_runs_resource_type" ON "node_runs" ("resource_type");
+CREATE INDEX IF NOT EXISTS "idx_node_runs_subject" ON "node_runs" ("subject_kind", "subject_id");
+CREATE INDEX IF NOT EXISTS "idx_node_runs_idempotency_key" ON "node_runs" ("idempotency_key");
+CREATE INDEX IF NOT EXISTS "idx_node_runs_status" ON "node_runs" ("status");
+
+-- Create command_execs table (Op resource)
+-- Uses file paths for stdout/stderr like agent_runs, instead of storing text directly
+CREATE TABLE IF NOT EXISTS "command_execs" (
+ "id" text PRIMARY KEY NOT NULL,
+ "work_item_id" text NOT NULL,
+ "node_run_id" text,
+ "command" text NOT NULL,
+ "status" text NOT NULL CHECK("status" IN ('pending', 'running', 'succeeded', 'failed')) DEFAULT 'pending',
+ "exit_code" integer,
+ "stdout_path" text,
+ "stderr_path" text,
+ "log_path" text,
+ "idempotency_key" text,
+ "started_at" integer,
+ "completed_at" integer,
+ "created_at" integer NOT NULL DEFAULT (unixepoch()),
+ "updated_at" integer NOT NULL DEFAULT (unixepoch()),
+ FOREIGN KEY ("work_item_id") REFERENCES "work_items"("id") ON DELETE CASCADE
+);
+
+CREATE INDEX IF NOT EXISTS "idx_command_execs_work_item_id" ON "command_execs" ("work_item_id");
+CREATE INDEX IF NOT EXISTS "idx_command_execs_node_run_id" ON "command_execs" ("node_run_id");
+CREATE INDEX IF NOT EXISTS "idx_command_execs_status" ON "command_execs" ("status");
+CREATE INDEX IF NOT EXISTS "idx_command_execs_idempotency_key" ON "command_execs" ("idempotency_key");
+CREATE UNIQUE INDEX IF NOT EXISTS "command_execs_idempotency_key_unique" ON "command_execs" ("idempotency_key") WHERE "idempotency_key" IS NOT NULL;
+
+-- Create event_outbox table
+CREATE TABLE IF NOT EXISTS "event_outbox" (
+ "id" text PRIMARY KEY NOT NULL,
+ "event_id" text NOT NULL UNIQUE,
+ "event_type" text NOT NULL,
+ "event_data" text NOT NULL,
+ "subject_kind" text NOT NULL,
+ "subject_id" text NOT NULL,
+ "resource_version" integer,
+ "caused_by" text,
+ "created_at" integer NOT NULL DEFAULT (unixepoch()),
+ "processed_at" integer,
+ "retry_count" integer NOT NULL DEFAULT 0
+);
+
+CREATE INDEX IF NOT EXISTS "idx_event_outbox_event_id" ON "event_outbox" ("event_id");
+CREATE INDEX IF NOT EXISTS "idx_event_outbox_subject" ON "event_outbox" ("subject_kind", "subject_id");
+CREATE INDEX IF NOT EXISTS "idx_event_outbox_processed" ON "event_outbox" ("processed_at");
+
+-- Create tasks table (Domain resource)
+CREATE TABLE IF NOT EXISTS "tasks" (
+ "id" text PRIMARY KEY NOT NULL,
+ "work_item_id" text NOT NULL,
+ "task_type" text NOT NULL,
+ "status" text NOT NULL CHECK("status" IN ('pending', 'running', 'succeeded', 'failed', 'canceled', 'blocked')) DEFAULT 'pending',
+ "input" text NOT NULL DEFAULT '{}',
+ "output" text NOT NULL DEFAULT '{}',
+ "current_agent_run_id" text,
+ "idempotency_key" text,
+ "node_run_id" text,
+ "created_at" integer NOT NULL DEFAULT (unixepoch()),
+ "updated_at" integer NOT NULL DEFAULT (unixepoch()),
+ FOREIGN KEY ("work_item_id") REFERENCES "work_items"("id") ON DELETE CASCADE,
+ FOREIGN KEY ("node_run_id") REFERENCES "node_runs"("id") ON DELETE SET NULL
+);
+
+CREATE INDEX IF NOT EXISTS "idx_tasks_work_item_id" ON "tasks" ("work_item_id");
+CREATE INDEX IF NOT EXISTS "idx_tasks_task_type" ON "tasks" ("task_type");
+CREATE INDEX IF NOT EXISTS "idx_tasks_status" ON "tasks" ("status");
+CREATE INDEX IF NOT EXISTS "idx_tasks_idempotency_key" ON "tasks" ("idempotency_key");
+CREATE INDEX IF NOT EXISTS "idx_tasks_current_agent_run_id" ON "tasks" ("current_agent_run_id");
+CREATE UNIQUE INDEX IF NOT EXISTS "tasks_idempotency_key_unique" ON "tasks" ("idempotency_key") WHERE "idempotency_key" IS NOT NULL;
+
+-- Create worktrees table (Op resource)
+CREATE TABLE IF NOT EXISTS "worktrees" (
+ "id" text PRIMARY KEY NOT NULL,
+ "work_item_id" text NOT NULL,
+ "path" text NOT NULL,
+ "branch" text NOT NULL,
+ "repo_sha" text,
+ "status" text NOT NULL CHECK("status" IN ('pending', 'running', 'succeeded', 'failed', 'canceled')) DEFAULT 'pending',
+ "idempotency_key" text,
+ "node_run_id" text,
+ "created_at" integer NOT NULL DEFAULT (unixepoch()),
+ "updated_at" integer NOT NULL DEFAULT (unixepoch()),
+ FOREIGN KEY ("work_item_id") REFERENCES "work_items"("id") ON DELETE CASCADE,
+ FOREIGN KEY ("node_run_id") REFERENCES "node_runs"("id") ON DELETE SET NULL
+);
+
+CREATE INDEX IF NOT EXISTS "idx_worktrees_work_item_id" ON "worktrees" ("work_item_id");
+CREATE INDEX IF NOT EXISTS "idx_worktrees_status" ON "worktrees" ("status");
+CREATE INDEX IF NOT EXISTS "idx_worktrees_idempotency_key" ON "worktrees" ("idempotency_key");
+CREATE UNIQUE INDEX IF NOT EXISTS "worktrees_idempotency_key_unique" ON "worktrees" ("idempotency_key") WHERE "idempotency_key" IS NOT NULL;
+
+-- Create git_ops table (Op resource)
+CREATE TABLE IF NOT EXISTS "git_ops" (
+ "id" text PRIMARY KEY NOT NULL,
+ "work_item_id" text NOT NULL,
+ "operation" text NOT NULL,
+ "status" text NOT NULL CHECK("status" IN ('pending', 'running', 'succeeded', 'failed', 'canceled')) DEFAULT 'pending',
+ "input" text NOT NULL DEFAULT '{}',
+ "output" text NOT NULL DEFAULT '{}',
+ "idempotency_key" text,
+ "node_run_id" text,
+ "created_at" integer NOT NULL DEFAULT (unixepoch()),
+ "updated_at" integer NOT NULL DEFAULT (unixepoch()),
+ FOREIGN KEY ("work_item_id") REFERENCES "work_items"("id") ON DELETE CASCADE,
+ FOREIGN KEY ("node_run_id") REFERENCES "node_runs"("id") ON DELETE SET NULL
+);
+
+CREATE INDEX IF NOT EXISTS "idx_git_ops_work_item_id" ON "git_ops" ("work_item_id");
+CREATE INDEX IF NOT EXISTS "idx_git_ops_operation" ON "git_ops" ("operation");
+CREATE INDEX IF NOT EXISTS "idx_git_ops_status" ON "git_ops" ("status");
+CREATE INDEX IF NOT EXISTS "idx_git_ops_idempotency_key" ON "git_ops" ("idempotency_key");
+CREATE UNIQUE INDEX IF NOT EXISTS "git_ops_idempotency_key_unique" ON "git_ops" ("idempotency_key") WHERE "idempotency_key" IS NOT NULL;
+
+-- Modify agent_runs table: add task_id and idempotency_key
+-- SQLite doesn't support ALTER TABLE ADD COLUMN with foreign keys directly, so we recreate the table
+CREATE TABLE IF NOT EXISTS "agent_runs_new" (
+ "id" text PRIMARY KEY NOT NULL,
+ "project_id" text NOT NULL,
+ "work_item_id" text NOT NULL,
+ "task_id" text,
+ "agent_key" text NOT NULL,
+ "status" text NOT NULL CHECK("status" IN ('queued', 'running', 'succeeded', 'failed', 'cancelled')) DEFAULT 'queued',
+ "input_summary" text,
+ "input_json" text NOT NULL,
+ "session_id" text,
+ "linked_agent_run_id" text,
+ "log" text,
+ "log_path" text,
+ "stdout_path" text,
+ "stderr_path" text,
+ "head_sha_before" text,
+ "head_sha_after" text,
+ "commit_sha" text,
+ "pid" integer,
+ "idempotency_key" text,
+ "node_run_id" text,
+ "started_at" integer,
+ "finished_at" integer,
+ "created_at" integer NOT NULL DEFAULT (unixepoch()),
+ "updated_at" integer NOT NULL DEFAULT (unixepoch()),
+ FOREIGN KEY ("project_id") REFERENCES "projects"("id") ON DELETE CASCADE,
+ FOREIGN KEY ("work_item_id") REFERENCES "work_items"("id") ON DELETE CASCADE,
+ FOREIGN KEY ("task_id") REFERENCES "tasks"("id") ON DELETE SET NULL
+);
+
+-- Copy data from old agent_runs to new agent_runs
+-- Note: pid and node_run_id don't exist in the old table, so we use NULL for them
+INSERT INTO "agent_runs_new" (
+ "id", "project_id", "work_item_id", "agent_key", "status", "input_summary", "input_json",
+ "session_id", "linked_agent_run_id", "log", "log_path", "stdout_path", "stderr_path",
+ "head_sha_before", "head_sha_after", "commit_sha", "pid", "node_run_id", "started_at", "finished_at",
+ "created_at", "updated_at"
+)
+SELECT
+ "id", "project_id", "work_item_id", "agent_key", "status", "input_summary", "input_json",
+ "session_id", "linked_agent_run_id", "log", "log_path", "stdout_path", "stderr_path",
+ "head_sha_before", "head_sha_after", "commit_sha", NULL as "pid", NULL as "node_run_id", "started_at", "finished_at",
+ "created_at", "updated_at"
+FROM "agent_runs";
+
+-- Drop old agent_runs table
+DROP TABLE IF EXISTS "agent_runs";
+
+-- Rename new table to agent_runs
+ALTER TABLE "agent_runs_new" RENAME TO "agent_runs";
+
+-- Recreate indexes for agent_runs
+CREATE INDEX IF NOT EXISTS "idx_agent_runs_work_item_id" ON "agent_runs" ("work_item_id");
+CREATE INDEX IF NOT EXISTS "idx_agent_runs_session_id" ON "agent_runs" ("session_id");
+CREATE INDEX IF NOT EXISTS "idx_agent_runs_status" ON "agent_runs" ("status");
+CREATE INDEX IF NOT EXISTS "idx_agent_runs_node_run_id" ON "agent_runs" ("node_run_id");
+CREATE INDEX IF NOT EXISTS "idx_agent_runs_task_id" ON "agent_runs" ("task_id");
+CREATE INDEX IF NOT EXISTS "idx_agent_runs_idempotency_key" ON "agent_runs" ("idempotency_key");
+CREATE UNIQUE INDEX IF NOT EXISTS "agent_runs_idempotency_key_unique" ON "agent_runs" ("idempotency_key") WHERE "idempotency_key" IS NOT NULL;
+
+-- Note: command_execs table is created above with idempotency_key already included
+
+-- Add idempotency_key & node_run_id to pull_requests table
+ALTER TABLE "pull_requests" ADD COLUMN "idempotency_key" text;
+ALTER TABLE "pull_requests" ADD COLUMN "node_run_id" text;
+CREATE INDEX IF NOT EXISTS "idx_pull_requests_idempotency_key" ON "pull_requests" ("idempotency_key");
+CREATE INDEX IF NOT EXISTS "idx_pull_requests_node_run_id" ON "pull_requests" ("node_run_id");
+CREATE UNIQUE INDEX IF NOT EXISTS "pull_requests_idempotency_key_unique" ON "pull_requests" ("idempotency_key") WHERE "idempotency_key" IS NOT NULL;
+
+-- Add idempotency_key and node_run_id to work_items table
+ALTER TABLE "work_items" ADD COLUMN "idempotency_key" text;
+ALTER TABLE "work_items" ADD COLUMN "node_run_id" text;
+CREATE INDEX IF NOT EXISTS "idx_work_items_idempotency_key" ON "work_items" ("idempotency_key");
+CREATE INDEX IF NOT EXISTS "idx_work_items_node_run_id" ON "work_items" ("node_run_id");
+CREATE UNIQUE INDEX IF NOT EXISTS "work_items_idempotency_key_unique" ON "work_items" ("idempotency_key") WHERE "idempotency_key" IS NOT NULL;
diff --git a/backend/drizzle/0002_app_settings.sql b/backend/drizzle/0002_app_settings.sql
new file mode 100644
index 0000000..d946f31
--- /dev/null
+++ b/backend/drizzle/0002_app_settings.sql
@@ -0,0 +1,9 @@
+-- Global app settings (key-value). Used for default project settings when creating a project.
+CREATE TABLE IF NOT EXISTS "app_settings" (
+ "key" text PRIMARY KEY NOT NULL,
+ "value" text NOT NULL
+);
+
+-- Default values: defaultAgent = opencode, defaultAgentParams = {}
+INSERT OR IGNORE INTO "app_settings" ("key", "value") VALUES ('defaultAgent', 'opencode');
+INSERT OR IGNORE INTO "app_settings" ("key", "value") VALUES ('defaultAgentParams', '{}');
diff --git a/backend/eslint.config.js b/backend/eslint.config.js
index 761cf58..b7ac426 100644
--- a/backend/eslint.config.js
+++ b/backend/eslint.config.js
@@ -38,6 +38,9 @@ export default [
...tseslint.configs.recommended.rules,
...eslintConfigPrettier.rules,
'prettier/prettier': 'error',
+ // TypeScript already type-checks undefined identifiers; this rule commonly
+ // misfires on Node/Web globals in ESM/TS projects.
+ 'no-undef': 'off',
'no-unused-vars': 'off',
'@typescript-eslint/no-unused-vars': ['error', { argsIgnorePattern: '^_' }],
'@typescript-eslint/explicit-function-return-type': 'off',
diff --git a/backend/src/config/storage.ts b/backend/src/config/storage.ts
index c1d0543..945d494 100644
--- a/backend/src/config/storage.ts
+++ b/backend/src/config/storage.ts
@@ -22,6 +22,7 @@ export const STORAGE_CONFIG = {
patchesDir: path.join(baseTempDir, 'patches'),
worktreesDir: path.join(baseTempDir, 'worktrees'),
projectsDir: path.join(baseTempDir, 'projects'),
+ mirrorsDir: path.join(baseTempDir, 'mirrors'),
projectRoot,
} as const;
diff --git a/backend/src/db/client.ts b/backend/src/db/client.ts
index d341a41..e7de57b 100644
--- a/backend/src/db/client.ts
+++ b/backend/src/db/client.ts
@@ -1,4 +1,4 @@
-import Database from 'better-sqlite3';
+import { default as Database } from 'better-sqlite3';
import { drizzle } from 'drizzle-orm/better-sqlite3';
import { STORAGE_CONFIG } from '../config/storage.js';
import { ensureStorageDirectories } from '../utils/storage.js';
@@ -18,6 +18,6 @@ export async function getDb() {
return db;
}
-export function getSqlite() {
+export function getSqlite(): Database.Database {
return new Database(STORAGE_CONFIG.dbPath);
}
diff --git a/backend/src/db/migrations.ts b/backend/src/db/migrations.ts
index 5f7cc7f..cd8e7cd 100644
--- a/backend/src/db/migrations.ts
+++ b/backend/src/db/migrations.ts
@@ -66,33 +66,6 @@ export async function runMigrations() {
}[];
const executedSet = new Set(executedMigrations.map((m) => m.filename));
- // Clean up any old migration entries from previous incomplete runs
- // Since this project hasn't been released, we can safely reset migration tracking
- // if the schema is incomplete
- const requiredTables = [
- 'projects',
- 'work_items',
- 'pull_requests',
- 'review_threads',
- 'review_comments',
- 'agent_runs',
- 'target_repos',
- ];
- const existingTables = sqlite
- .prepare("SELECT name FROM sqlite_master WHERE type='table'")
- .all() as { name: string }[];
- const existingTableNames = new Set(existingTables.map((t) => t.name));
-
- // Check if all required tables exist
- const allTablesExist = requiredTables.every((table) => existingTableNames.has(table));
-
- // If we have migration records but tables are missing, reset the migration tracking
- if (executedSet.size > 0 && !allTablesExist) {
- console.log('Detected incomplete schema, resetting migration tracking');
- sqlite.prepare('DELETE FROM _migrations').run();
- executedSet.clear();
- }
-
for (const file of files) {
if (executedSet.has(file)) {
console.log(`Skipping already executed migration: ${file}`);
diff --git a/backend/src/mappers/agentRuns.ts b/backend/src/mappers/agentRuns.ts
index 9f1235c..69b1ae3 100644
--- a/backend/src/mappers/agentRuns.ts
+++ b/backend/src/mappers/agentRuns.ts
@@ -13,6 +13,7 @@ export function toDTO(domain: AgentRunDomain): AgentRunDTO {
id: domain.id,
projectId: domain.projectId,
workItemId: domain.workItemId,
+ taskId: domain.taskId,
agentKey: domain.agentKey,
status: domain.status,
inputSummary: domain.inputSummary,
@@ -26,6 +27,9 @@ export function toDTO(domain: AgentRunDomain): AgentRunDTO {
headShaBefore: domain.headShaBefore,
headShaAfter: domain.headShaAfter,
commitSha: domain.commitSha,
+ pid: domain.pid,
+ idempotencyKey: domain.idempotencyKey,
+ nodeRunId: domain.nodeRunId ?? null,
startedAt: domain.startedAt?.toISOString() ?? null,
finishedAt: domain.finishedAt?.toISOString() ?? null,
createdAt: domain.createdAt.toISOString(),
@@ -41,6 +45,7 @@ export function toDomain(dto: AgentRunDTO): AgentRunDomain {
id: dto.id,
projectId: dto.projectId,
workItemId: dto.workItemId,
+ taskId: dto.taskId,
agentKey: dto.agentKey,
status: dto.status,
inputSummary: dto.inputSummary,
@@ -54,6 +59,9 @@ export function toDomain(dto: AgentRunDTO): AgentRunDomain {
headShaBefore: dto.headShaBefore,
headShaAfter: dto.headShaAfter,
commitSha: dto.commitSha,
+ pid: dto.pid,
+ idempotencyKey: dto.idempotencyKey,
+ nodeRunId: dto.nodeRunId ?? null,
startedAt: dto.startedAt ? new Date(dto.startedAt) : null,
finishedAt: dto.finishedAt ? new Date(dto.finishedAt) : null,
createdAt: new Date(dto.createdAt),
diff --git a/backend/src/mappers/index.ts b/backend/src/mappers/index.ts
index c44d10b..f70c8de 100644
--- a/backend/src/mappers/index.ts
+++ b/backend/src/mappers/index.ts
@@ -12,4 +12,3 @@ export {
reviewCommentToDTO,
reviewCommentToDomain,
} from './reviews.js';
-export { toDTO as targetRepoToDTO, toDomain as targetRepoToDomain } from './targetRepos.js';
diff --git a/backend/src/mappers/mappers.test.ts b/backend/src/mappers/mappers.test.ts
index 8e32387..6419643 100644
--- a/backend/src/mappers/mappers.test.ts
+++ b/backend/src/mappers/mappers.test.ts
@@ -14,7 +14,6 @@ import { toDTO as projectToDTO, toDomain as projectToDomain } from './projects.j
import { toDTO as workItemToDTO, toDomain as workItemToDomain } from './workItems.js';
import { toDTO as agentRunToDTO, toDomain as agentRunToDomain } from './agentRuns.js';
import { toDTO as pullRequestToDTO, toDomain as pullRequestToDomain } from './pullRequests.js';
-import { toDTO as targetRepoToDTO, toDomain as targetRepoToDomain } from './targetRepos.js';
import { reviewThreadToDTO, reviewThreadToDomain } from './reviews.js';
import type { Project as ProjectDomain } from '../types/models.js';
import {
@@ -22,7 +21,6 @@ import {
WorkItemSchema,
AgentRunSchema,
PullRequestSchema,
- TargetRepoSchema,
ReviewThreadSchema,
} from 'git-vibe-shared';
@@ -33,6 +31,7 @@ describe('Project mapper', () => {
name: 'test-project',
sourceRepoPath: '/path/to/source',
sourceRepoUrl: 'https://github.com/test/repo',
+ mirrorRepoPath: '/path/to/mirror.git',
relayRepoPath: '/path/to/relay',
defaultBranch: 'main',
defaultAgent: 'opencode',
@@ -59,6 +58,7 @@ describe('Project mapper', () => {
name: 'test-project',
sourceRepoPath: '/path/to/source',
sourceRepoUrl: 'https://github.com/test/repo',
+ mirrorRepoPath: '/path/to/mirror.git',
relayRepoPath: '/path/to/relay',
defaultBranch: 'main',
defaultAgent: 'opencode' as const,
@@ -80,6 +80,7 @@ describe('Project mapper', () => {
name: 'test-project',
sourceRepoPath: '/path/to/source',
sourceRepoUrl: null,
+ mirrorRepoPath: '/path/to/mirror.git',
relayRepoPath: '/path/to/relay',
defaultBranch: 'main',
defaultAgent: 'opencode',
@@ -101,6 +102,7 @@ describe('Project mapper', () => {
name: 'test-project',
sourceRepoPath: '/path/to/source',
sourceRepoUrl: null,
+ mirrorRepoPath: '/path/to/mirror.git',
relayRepoPath: '/path/to/relay',
defaultBranch: 'main',
defaultAgent: 'opencode',
@@ -209,6 +211,7 @@ describe('AgentRun mapper', () => {
id: uuidv4(),
projectId: uuidv4(),
workItemId: uuidv4(),
+ taskId: null as string | null,
agentKey: 'opencode' as const,
status: 'succeeded' as const,
inputSummary: 'Test summary',
@@ -222,6 +225,8 @@ describe('AgentRun mapper', () => {
headShaBefore: 'abc123',
headShaAfter: 'def456',
commitSha: 'ghi789',
+ pid: 12345,
+ idempotencyKey: 'test-idempotency-key',
startedAt: new Date('2024-01-15T10:30:00.000Z'),
finishedAt: new Date('2024-01-15T11:00:00.000Z'),
createdAt: new Date('2024-01-15T10:30:00.000Z'),
@@ -239,6 +244,8 @@ describe('AgentRun mapper', () => {
expect(dto.updatedAt).toBe('2024-01-15T11:00:00.000Z');
expect(dto.startedAt).toBe('2024-01-15T10:30:00.000Z');
expect(dto.finishedAt).toBe('2024-01-15T11:00:00.000Z');
+ // Verify pid is included
+ expect(dto.pid).toBe(12345);
});
it('handles null optional date fields', () => {
@@ -246,6 +253,7 @@ describe('AgentRun mapper', () => {
id: uuidv4(),
projectId: uuidv4(),
workItemId: uuidv4(),
+ taskId: null as string | null,
agentKey: 'opencode' as const,
status: 'queued' as const,
inputSummary: null,
@@ -259,6 +267,8 @@ describe('AgentRun mapper', () => {
headShaBefore: null,
headShaAfter: null,
commitSha: null,
+ pid: null,
+ idempotencyKey: null as string | null,
startedAt: null,
finishedAt: null,
createdAt: new Date('2024-01-15T10:30:00.000Z'),
@@ -270,6 +280,7 @@ describe('AgentRun mapper', () => {
expect(result.success).toBe(true);
expect(dto.startedAt).toBeNull();
expect(dto.finishedAt).toBeNull();
+ expect(dto.pid).toBeNull();
});
it('converts DTO to domain model', () => {
@@ -277,6 +288,7 @@ describe('AgentRun mapper', () => {
id: uuidv4(),
projectId: uuidv4(),
workItemId: uuidv4(),
+ taskId: null as string | null,
agentKey: 'opencode' as const,
status: 'succeeded' as const,
inputSummary: 'Test summary',
@@ -290,6 +302,8 @@ describe('AgentRun mapper', () => {
headShaBefore: 'abc123',
headShaAfter: 'def456',
commitSha: 'ghi789',
+ pid: 12345,
+ idempotencyKey: 'test-idempotency-key',
startedAt: '2024-01-15T10:30:00.000Z',
finishedAt: '2024-01-15T11:00:00.000Z',
createdAt: '2024-01-15T10:30:00.000Z',
@@ -301,6 +315,7 @@ describe('AgentRun mapper', () => {
expect(domain.finishedAt).toEqual(new Date('2024-01-15T11:00:00.000Z'));
expect(domain.createdAt).toEqual(new Date('2024-01-15T10:30:00.000Z'));
expect(domain.updatedAt).toEqual(new Date('2024-01-15T11:00:00.000Z'));
+ expect(domain.pid).toBe(12345);
});
});
@@ -321,6 +336,7 @@ describe('PullRequest mapper', () => {
mergedAt: new Date('2024-01-15T11:00:00.000Z'),
mergedBy: 'user@example.com',
mergeCommitSha: 'abc123',
+ syncedCommitSha: null,
};
const dto = pullRequestToDTO(domain);
@@ -351,6 +367,7 @@ describe('PullRequest mapper', () => {
mergedAt: null,
mergedBy: null,
mergeCommitSha: null,
+ syncedCommitSha: null,
};
const dto = pullRequestToDTO(domain);
@@ -375,6 +392,7 @@ describe('PullRequest mapper', () => {
mergedAt: '2024-01-15T11:00:00.000Z',
mergedBy: 'user@example.com',
mergeCommitSha: 'abc123',
+ syncedCommitSha: null,
};
const domain = pullRequestToDomain(dto);
@@ -384,44 +402,6 @@ describe('PullRequest mapper', () => {
});
});
-describe('TargetRepo mapper', () => {
- it('converts domain model to DTO matching shared schema', () => {
- const domain = {
- id: uuidv4(),
- name: 'test-target',
- repoPath: '/path/to/target',
- defaultBranch: 'main',
- createdAt: new Date('2024-01-15T10:30:00.000Z'),
- updatedAt: new Date('2024-01-15T10:30:00.000Z'),
- };
-
- const dto = targetRepoToDTO(domain);
-
- // Validate against shared schema
- const result = TargetRepoSchema.safeParse(dto);
- expect(result.success).toBe(true);
-
- // Verify date fields are in canonical ISO format
- expect(dto.createdAt).toBe('2024-01-15T10:30:00.000Z');
- expect(dto.updatedAt).toBe('2024-01-15T10:30:00.000Z');
- });
-
- it('converts DTO to domain model', () => {
- const dto = {
- id: uuidv4(),
- name: 'test-target',
- repoPath: '/path/to/target',
- defaultBranch: 'main',
- createdAt: '2024-01-15T10:30:00.000Z',
- updatedAt: '2024-01-15T10:30:00.000Z',
- };
-
- const domain = targetRepoToDomain(dto);
- expect(domain.createdAt).toEqual(new Date('2024-01-15T10:30:00.000Z'));
- expect(domain.updatedAt).toEqual(new Date('2024-01-15T10:30:00.000Z'));
- });
-});
-
describe('ReviewThread mapper', () => {
it('converts domain model to DTO matching shared schema', () => {
const domain = {
diff --git a/backend/src/mappers/projects.ts b/backend/src/mappers/projects.ts
index 34931d2..be93171 100644
--- a/backend/src/mappers/projects.ts
+++ b/backend/src/mappers/projects.ts
@@ -14,6 +14,7 @@ export function toDTO(domain: ProjectDomain): ProjectDTO {
name: domain.name,
sourceRepoPath: domain.sourceRepoPath,
sourceRepoUrl: domain.sourceRepoUrl,
+ mirrorRepoPath: domain.mirrorRepoPath,
relayRepoPath: domain.relayRepoPath,
defaultBranch: domain.defaultBranch,
defaultAgent: domain.defaultAgent,
@@ -33,6 +34,7 @@ export function toDomain(dto: ProjectDTO): ProjectDomain {
name: dto.name,
sourceRepoPath: dto.sourceRepoPath,
sourceRepoUrl: dto.sourceRepoUrl,
+ mirrorRepoPath: dto.mirrorRepoPath,
relayRepoPath: dto.relayRepoPath,
defaultBranch: dto.defaultBranch,
defaultAgent: dto.defaultAgent,
diff --git a/backend/src/mappers/targetRepos.ts b/backend/src/mappers/targetRepos.ts
deleted file mode 100644
index 47fe5ed..0000000
--- a/backend/src/mappers/targetRepos.ts
+++ /dev/null
@@ -1,34 +0,0 @@
-/**
- * Mapper for TargetRepo - converts between domain model (Date) and DTO (ISO string)
- */
-
-import type { TargetRepo as TargetRepoDomain } from '../types/models.js';
-import type { TargetRepoDTO } from 'git-vibe-shared';
-
-/**
- * Convert domain model (with Date) to DTO (with ISO string)
- */
-export function toDTO(domain: TargetRepoDomain): TargetRepoDTO {
- return {
- id: domain.id,
- name: domain.name,
- repoPath: domain.repoPath,
- defaultBranch: domain.defaultBranch,
- createdAt: domain.createdAt.toISOString(),
- updatedAt: domain.updatedAt.toISOString(),
- };
-}
-
-/**
- * Convert DTO (with ISO string) to domain model (with Date)
- */
-export function toDomain(dto: TargetRepoDTO): TargetRepoDomain {
- return {
- id: dto.id,
- name: dto.name,
- repoPath: dto.repoPath,
- defaultBranch: dto.defaultBranch,
- createdAt: new Date(dto.createdAt),
- updatedAt: new Date(dto.updatedAt),
- };
-}
diff --git a/backend/src/mappers/tasks.ts b/backend/src/mappers/tasks.ts
new file mode 100644
index 0000000..f2a9ca5
--- /dev/null
+++ b/backend/src/mappers/tasks.ts
@@ -0,0 +1,44 @@
+/**
+ * Mapper for Task - converts between domain model (Date) and DTO (ISO string)
+ */
+
+import type { Task as TaskDomain } from '../types/models.js';
+import type { TaskDTO } from 'git-vibe-shared';
+
+/**
+ * Convert domain model (with Date) to DTO (with ISO string)
+ */
+export function toDTO(domain: TaskDomain): TaskDTO {
+ return {
+ id: domain.id,
+ workItemId: domain.workItemId,
+ taskType: domain.taskType,
+ status: domain.status,
+ input: domain.input,
+ output: domain.output,
+ currentAgentRunId: domain.currentAgentRunId,
+ idempotencyKey: domain.idempotencyKey,
+ nodeRunId: domain.nodeRunId,
+ createdAt: domain.createdAt.toISOString(),
+ updatedAt: domain.updatedAt.toISOString(),
+ };
+}
+
+/**
+ * Convert DTO (with ISO string) to domain model (with Date)
+ */
+export function toDomain(dto: TaskDTO): TaskDomain {
+ return {
+ id: dto.id,
+ workItemId: dto.workItemId,
+ taskType: dto.taskType,
+ status: dto.status,
+ input: dto.input,
+ output: dto.output,
+ currentAgentRunId: dto.currentAgentRunId,
+ idempotencyKey: dto.idempotencyKey,
+ nodeRunId: dto.nodeRunId,
+ createdAt: new Date(dto.createdAt),
+ updatedAt: new Date(dto.updatedAt),
+ };
+}
diff --git a/backend/src/middleware/setup.ts b/backend/src/middleware/setup.ts
index ff1e76a..98eb4f2 100644
--- a/backend/src/middleware/setup.ts
+++ b/backend/src/middleware/setup.ts
@@ -5,7 +5,7 @@ import { ensureStorageDirectories } from '../utils/storage.js';
export async function createServer() {
const server = Fastify({
logger: {
- level: process.env.LOG_LEVEL || 'info',
+ level: process.env.LOG_LEVEL || 'warn',
transport: {
target: 'pino-pretty',
options: {
diff --git a/backend/src/models/schema.ts b/backend/src/models/schema.ts
index d44cd77..48b51ae 100644
--- a/backend/src/models/schema.ts
+++ b/backend/src/models/schema.ts
@@ -1,7 +1,24 @@
import { sqliteTable, text, integer, index } from 'drizzle-orm/sqlite-core';
import { sql } from 'drizzle-orm';
-export const workItems = sqliteTable(
+// ============================================================================
+// Resource Type Enum (Optimized Design)
+// ============================================================================
+
+export const RESOURCE_TYPES = [
+ 'WorkItem',
+ 'Worktree',
+ 'Task',
+ 'AgentRun',
+ 'PullRequest',
+ 'GitOps',
+ 'CommandExec',
+] as const;
+
+export type ResourceType = (typeof RESOURCE_TYPES)[number];
+
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+export const workItems: any = sqliteTable(
'work_items',
{
id: text('id').primaryKey(),
@@ -26,6 +43,9 @@ export const workItems = sqliteTable(
// Locking fields for serialized agent runs
lockOwnerRunId: text('lock_owner_run_id'),
lockExpiresAt: integer('lock_expires_at', { mode: 'timestamp' }),
+ idempotencyKey: text('idempotency_key'),
+ // Idempotency: link to the NodeRun that created/last updated this resource
+ nodeRunId: text('node_run_id').references(() => nodeRuns.id, { onDelete: 'set null' }),
createdAt: integer('created_at', { mode: 'timestamp' })
.notNull()
.default(sql`(unixepoch())`),
@@ -39,6 +59,8 @@ export const workItems = sqliteTable(
workspaceStatusIdx: index('idx_work_items_workspace_status').on(table.workspaceStatus),
headBranchIdx: index('idx_work_items_head_branch').on(table.headBranch),
lockOwnerIdx: index('idx_work_items_lock_owner').on(table.lockOwnerRunId),
+ idempotencyKeyIdx: index('idx_work_items_idempotency_key').on(table.idempotencyKey),
+ nodeRunIdIdx: index('idx_work_items_node_run_id').on(table.nodeRunId),
})
);
@@ -47,6 +69,7 @@ export const projects = sqliteTable('projects', {
name: text('name').notNull().unique(),
sourceRepoPath: text('source_repo_path').notNull(),
sourceRepoUrl: text('source_repo_url'),
+ mirrorRepoPath: text('mirror_repo_path').notNull(),
relayRepoPath: text('relay_repo_path').notNull(),
defaultBranch: text('default_branch').notNull(),
defaultAgent: text('default_agent').notNull().default('opencode'),
@@ -60,19 +83,6 @@ export const projects = sqliteTable('projects', {
.default(sql`(unixepoch())`),
});
-export const targetRepos = sqliteTable('target_repos', {
- id: text('id').primaryKey(),
- name: text('name').notNull(),
- repoPath: text('repo_path').notNull().unique(),
- defaultBranch: text('default_branch').notNull(),
- createdAt: integer('created_at', { mode: 'timestamp' })
- .notNull()
- .default(sql`(unixepoch())`),
- updatedAt: integer('updated_at', { mode: 'timestamp' })
- .notNull()
- .default(sql`(unixepoch())`),
-});
-
export const pullRequests = sqliteTable(
'pull_requests',
{
@@ -94,6 +104,9 @@ export const pullRequests = sqliteTable(
mergeStrategy: text('merge_strategy', { enum: ['merge', 'squash', 'rebase'] })
.notNull()
.default('merge'),
+ idempotencyKey: text('idempotency_key'),
+ // Idempotency: link to NodeRun that created this resource
+ nodeRunId: text('node_run_id'),
createdAt: integer('created_at', { mode: 'timestamp' })
.notNull()
.default(sql`(unixepoch())`),
@@ -108,6 +121,7 @@ export const pullRequests = sqliteTable(
(table) => ({
workItemIdIdx: index('idx_pull_requests_work_item_id').on(table.workItemId),
statusIdx: index('idx_pull_requests_status').on(table.status),
+ idempotencyKeyIdx: index('idx_pull_requests_idempotency_key').on(table.idempotencyKey),
})
);
@@ -148,6 +162,106 @@ export const reviewComments = sqliteTable('review_comments', {
.default(sql`(unixepoch())`),
});
+// Tasks table (Domain resource) - must be defined before agentRuns
+export const tasks = sqliteTable(
+ 'tasks',
+ {
+ id: text('id').primaryKey(),
+ workItemId: text('work_item_id')
+ .notNull()
+ .references(() => workItems.id, { onDelete: 'cascade' }),
+ taskType: text('task_type').notNull(),
+ status: text('status', {
+ enum: ['pending', 'running', 'succeeded', 'failed', 'canceled', 'blocked'],
+ })
+ .notNull()
+ .default('pending'),
+ input: text('input').notNull().default('{}'),
+ output: text('output').notNull().default('{}'),
+ currentAgentRunId: text('current_agent_run_id'),
+ idempotencyKey: text('idempotency_key'),
+ nodeRunId: text('node_run_id').references(() => nodeRuns.id, { onDelete: 'set null' }),
+ createdAt: integer('created_at', { mode: 'timestamp' })
+ .notNull()
+ .default(sql`(unixepoch())`),
+ updatedAt: integer('updated_at', { mode: 'timestamp' })
+ .notNull()
+ .default(sql`(unixepoch())`),
+ },
+ (table) => ({
+ workItemIdIdx: index('idx_tasks_work_item_id').on(table.workItemId),
+ taskTypeIdx: index('idx_tasks_task_type').on(table.taskType),
+ statusIdx: index('idx_tasks_status').on(table.status),
+ idempotencyKeyIdx: index('idx_tasks_idempotency_key').on(table.idempotencyKey),
+ currentAgentRunIdIdx: index('idx_tasks_current_agent_run_id').on(table.currentAgentRunId),
+ })
+);
+
+// Worktrees table (Op resource)
+export const worktrees = sqliteTable(
+ 'worktrees',
+ {
+ id: text('id').primaryKey(),
+ workItemId: text('work_item_id')
+ .notNull()
+ .references(() => workItems.id, { onDelete: 'cascade' }),
+ path: text('path').notNull(),
+ branch: text('branch').notNull(),
+ repoSha: text('repo_sha'),
+ status: text('status', {
+ enum: ['pending', 'running', 'succeeded', 'failed', 'canceled'],
+ })
+ .notNull()
+ .default('pending'),
+ idempotencyKey: text('idempotency_key'),
+ nodeRunId: text('node_run_id').references(() => nodeRuns.id, { onDelete: 'set null' }),
+ createdAt: integer('created_at', { mode: 'timestamp' })
+ .notNull()
+ .default(sql`(unixepoch())`),
+ updatedAt: integer('updated_at', { mode: 'timestamp' })
+ .notNull()
+ .default(sql`(unixepoch())`),
+ },
+ (table) => ({
+ workItemIdIdx: index('idx_worktrees_work_item_id').on(table.workItemId),
+ statusIdx: index('idx_worktrees_status').on(table.status),
+ idempotencyKeyIdx: index('idx_worktrees_idempotency_key').on(table.idempotencyKey),
+ })
+);
+
+// GitOps table (Op resource)
+export const gitOps = sqliteTable(
+ 'git_ops',
+ {
+ id: text('id').primaryKey(),
+ workItemId: text('work_item_id')
+ .notNull()
+ .references(() => workItems.id, { onDelete: 'cascade' }),
+ operation: text('operation').notNull(),
+ status: text('status', {
+ enum: ['pending', 'running', 'succeeded', 'failed', 'canceled'],
+ })
+ .notNull()
+ .default('pending'),
+ input: text('input').notNull().default('{}'),
+ output: text('output').notNull().default('{}'),
+ idempotencyKey: text('idempotency_key'),
+ nodeRunId: text('node_run_id').references(() => nodeRuns.id, { onDelete: 'set null' }),
+ createdAt: integer('created_at', { mode: 'timestamp' })
+ .notNull()
+ .default(sql`(unixepoch())`),
+ updatedAt: integer('updated_at', { mode: 'timestamp' })
+ .notNull()
+ .default(sql`(unixepoch())`),
+ },
+ (table) => ({
+ workItemIdIdx: index('idx_git_ops_work_item_id').on(table.workItemId),
+ operationIdx: index('idx_git_ops_operation').on(table.operation),
+ statusIdx: index('idx_git_ops_status').on(table.status),
+ idempotencyKeyIdx: index('idx_git_ops_idempotency_key').on(table.idempotencyKey),
+ })
+);
+
export const agentRuns = sqliteTable(
'agent_runs',
{
@@ -158,6 +272,11 @@ export const agentRuns = sqliteTable(
workItemId: text('work_item_id')
.notNull()
.references(() => workItems.id, { onDelete: 'cascade' }),
+ taskId: text('task_id').references(
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ (): any => tasks.id,
+ { onDelete: 'set null' }
+ ),
agentKey: text('agent_key').notNull(),
status: text('status', {
enum: ['queued', 'running', 'succeeded', 'failed', 'cancelled'],
@@ -166,7 +285,7 @@ export const agentRuns = sqliteTable(
.default('queued'),
inputSummary: text('input_summary'),
inputJson: text('input_json').notNull(),
- sessionId: text('session_id').notNull(),
+ sessionId: text('session_id'), // Nullable: set to null if no session available (task cannot be resumed)
linkedAgentRunId: text('linked_agent_run_id').references(
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(): any => agentRuns.id,
@@ -181,6 +300,10 @@ export const agentRuns = sqliteTable(
headShaBefore: text('head_sha_before'),
headShaAfter: text('head_sha_after'),
commitSha: text('commit_sha'),
+ pid: integer('pid'), // Process ID for tracking running processes
+ idempotencyKey: text('idempotency_key'),
+ // Idempotency: link to NodeRun that created this resource
+ nodeRunId: text('node_run_id'),
startedAt: integer('started_at', { mode: 'timestamp' }),
finishedAt: integer('finished_at', { mode: 'timestamp' }),
createdAt: integer('created_at', { mode: 'timestamp' })
@@ -194,5 +317,171 @@ export const agentRuns = sqliteTable(
workItemIdIdx: index('idx_agent_runs_work_item_id').on(table.workItemId),
sessionIdIdx: index('idx_agent_runs_session_id').on(table.sessionId),
statusIdx: index('idx_agent_runs_status').on(table.status),
+ taskIdIdx: index('idx_agent_runs_task_id').on(table.taskId),
+ idempotencyKeyIdx: index('idx_agent_runs_idempotency_key').on(table.idempotencyKey),
+ })
+);
+
+export const workflows = sqliteTable(
+ 'workflows',
+ {
+ id: text('id').primaryKey(),
+ projectId: text('project_id')
+ .notNull()
+ .references(() => projects.id, { onDelete: 'cascade' }),
+ name: text('name').notNull(),
+ definition: text('definition').notNull(), // JSON stringified Workflow
+ isDefault: integer('is_default', { mode: 'boolean' }).notNull().default(false),
+ version: integer('version').notNull().default(1),
+ createdAt: integer('created_at', { mode: 'timestamp' })
+ .notNull()
+ .default(sql`(unixepoch())`),
+ updatedAt: integer('updated_at', { mode: 'timestamp' })
+ .notNull()
+ .default(sql`(unixepoch())`),
+ },
+ (table) => ({
+ projectIdIdx: index('idx_workflows_project_id').on(table.projectId),
+ projectNameUnique: index('idx_workflows_project_name_unique').on(table.projectId, table.name),
+ })
+);
+
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+export const workflowRuns: any = sqliteTable(
+ 'workflow_runs',
+ {
+ id: text('id').primaryKey(),
+ workflowId: text('workflow_id')
+ .notNull()
+ .references(() => workflows.id, { onDelete: 'cascade' }),
+ workItemId: text('work_item_id')
+ .notNull()
+ .references(() => workItems.id, { onDelete: 'cascade' }),
+ status: text('status', {
+ enum: ['pending', 'running', 'succeeded', 'failed', 'canceled', 'blocked'],
+ })
+ .notNull()
+ .default('pending'),
+ currentStepId: text('current_step_id'),
+ startedAt: integer('started_at', { mode: 'timestamp' }),
+ finishedAt: integer('finished_at', { mode: 'timestamp' }),
+ createdAt: integer('created_at', { mode: 'timestamp' })
+ .notNull()
+ .default(sql`(unixepoch())`),
+ },
+ (table) => ({
+ workflowIdIdx: index('idx_workflow_runs_workflow_id').on(table.workflowId),
+ workItemIdIdx: index('idx_workflow_runs_work_item_id').on(table.workItemId),
+ statusIdx: index('idx_workflow_runs_status').on(table.status),
+ })
+);
+
+// CommandExec table for optimized workflow design
+// Uses file paths for stdout/stderr like agent_runs, instead of storing text directly
+export const commandExecs = sqliteTable(
+ 'command_execs',
+ {
+ id: text('id').primaryKey(),
+ workItemId: text('work_item_id')
+ .notNull()
+ .references(() => workItems.id, { onDelete: 'cascade' }),
+ nodeRunId: text('node_run_id'),
+ command: text('command').notNull(),
+ status: text('status', { enum: ['pending', 'running', 'succeeded', 'failed'] })
+ .notNull()
+ .default('pending'),
+ exitCode: integer('exit_code'),
+ stdoutPath: text('stdout_path'),
+ stderrPath: text('stderr_path'),
+ logPath: text('log_path'),
+ idempotencyKey: text('idempotency_key'),
+ startedAt: integer('started_at', { mode: 'timestamp' }),
+ completedAt: integer('completed_at', { mode: 'timestamp' }),
+ createdAt: integer('created_at', { mode: 'timestamp' })
+ .notNull()
+ .default(sql`(unixepoch())`),
+ updatedAt: integer('updated_at', { mode: 'timestamp' })
+ .notNull()
+ .default(sql`(unixepoch())`),
+ },
+ (table) => ({
+ workItemIdIdx: index('idx_command_execs_work_item_id').on(table.workItemId),
+ nodeRunIdIdx: index('idx_command_execs_node_run_id').on(table.nodeRunId),
+ statusIdx: index('idx_command_execs_status').on(table.status),
+ idempotencyKeyIdx: index('idx_command_execs_idempotency_key').on(table.idempotencyKey),
+ })
+);
+
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+export const nodeRuns: any = sqliteTable(
+ 'node_runs',
+ {
+ id: text('id').primaryKey(),
+ runId: text('run_id').notNull(),
+ workflowRunId: text('workflow_run_id')
+ .notNull()
+ .references(() => workflowRuns.id, { onDelete: 'cascade' }),
+ nodeId: text('node_id').notNull(),
+ resourceType: text('resource_type', {
+ enum: RESOURCE_TYPES,
+ }).notNull(),
+ subjectKind: text('subject_kind', {
+ enum: ['workitem', 'task', 'pr_request', 'worktree'],
+ }).notNull(),
+ subjectId: text('subject_id').notNull(),
+ subjectVersionAtStart: integer('subject_version_at_start').notNull(),
+ status: text('status', {
+ enum: ['pending', 'running', 'succeeded', 'failed', 'canceled', 'blocked'],
+ })
+ .notNull()
+ .default('pending'),
+ attempt: integer('attempt').notNull().default(1),
+ idempotencyKey: text('idempotency_key'),
+ input: text('input').notNull(), // JSON stringified
+ output: text('output').notNull(), // JSON stringified
+ error: text('error'),
+ startedAt: integer('started_at', { mode: 'timestamp' }),
+ finishedAt: integer('finished_at', { mode: 'timestamp' }),
+ createdAt: integer('created_at', { mode: 'timestamp' })
+ .notNull()
+ .default(sql`(unixepoch())`),
+ },
+ (table) => ({
+ workflowRunIdIdx: index('idx_node_runs_workflow_run_id').on(table.workflowRunId),
+ nodeIdIdx: index('idx_node_runs_node_id').on(table.nodeId),
+ resourceTypeIdx: index('idx_node_runs_resource_type').on(table.resourceType),
+ subjectIdx: index('idx_node_runs_subject').on(table.subjectKind, table.subjectId),
+ idempotencyKeyIdx: index('idx_node_runs_idempotency_key').on(table.idempotencyKey),
+ statusIdx: index('idx_node_runs_status').on(table.status),
+ })
+);
+
+// Global app settings (key-value). Keys: defaultAgent, defaultAgentParams (JSON)
+export const appSettings = sqliteTable('app_settings', {
+ key: text('key').primaryKey(),
+ value: text('value').notNull(),
+});
+
+export const eventOutbox = sqliteTable(
+ 'event_outbox',
+ {
+ id: text('id').primaryKey(),
+ eventId: text('event_id').notNull().unique(),
+ eventType: text('event_type').notNull(),
+ eventData: text('event_data').notNull(), // JSON stringified
+ subjectKind: text('subject_kind').notNull(),
+ subjectId: text('subject_id').notNull(),
+ resourceVersion: integer('resource_version'),
+ causedBy: text('caused_by'), // JSON stringified
+ createdAt: integer('created_at', { mode: 'timestamp' })
+ .notNull()
+ .default(sql`(unixepoch())`),
+ processedAt: integer('processed_at', { mode: 'timestamp' }),
+ retryCount: integer('retry_count').notNull().default(0),
+ },
+ (table) => ({
+ eventIdIdx: index('idx_event_outbox_event_id').on(table.eventId),
+ subjectIdx: index('idx_event_outbox_subject').on(table.subjectKind, table.subjectId),
+ processedIdx: index('idx_event_outbox_processed').on(table.processedAt),
})
);
diff --git a/backend/src/repositories/AgentRunsRepository.ts b/backend/src/repositories/AgentRunsRepository.ts
index 9ddbfa0..c0cf030 100644
--- a/backend/src/repositories/AgentRunsRepository.ts
+++ b/backend/src/repositories/AgentRunsRepository.ts
@@ -1,4 +1,5 @@
import { eq } from 'drizzle-orm';
+import { AGENT_RUN_STATUS_QUEUED } from 'git-vibe-shared';
import { agentRuns } from '../models/schema.js';
import type { AgentRun } from '../types/models.js';
import { getDb } from '../db/client.js';
@@ -20,8 +21,11 @@ export class AgentRunsRepository {
agentKey: string;
inputSummary?: string;
inputJson: string;
- sessionId: string;
+ sessionId: string | null;
linkedAgentRunId?: string | null;
+ taskId?: string | null;
+ idempotencyKey?: string | null;
+ nodeRunId?: string | null;
}): Promise {
const db = await this.getDbInstance();
const values: {
@@ -30,10 +34,13 @@ export class AgentRunsRepository {
projectId: string;
agentKey: string;
inputJson: string;
- sessionId: string;
+ sessionId: string | null;
status: 'queued' | 'running' | 'succeeded' | 'failed' | 'cancelled';
inputSummary?: string;
linkedAgentRunId?: string | null;
+ taskId?: string | null;
+ idempotencyKey?: string | null;
+ nodeRunId?: string | null;
} = {
id: data.id,
workItemId: data.workItemId,
@@ -41,7 +48,7 @@ export class AgentRunsRepository {
agentKey: data.agentKey,
inputJson: data.inputJson,
sessionId: data.sessionId,
- status: 'queued',
+ status: AGENT_RUN_STATUS_QUEUED,
};
if (data.inputSummary !== undefined) {
@@ -52,16 +59,28 @@ export class AgentRunsRepository {
values.linkedAgentRunId = data.linkedAgentRunId;
}
+ if (data.taskId !== undefined) {
+ values.taskId = data.taskId;
+ }
+
+ if (data.idempotencyKey !== undefined) {
+ values.idempotencyKey = data.idempotencyKey;
+ }
+
+ if (data.nodeRunId !== undefined) {
+ values.nodeRunId = data.nodeRunId;
+ }
+
const [agentRun] = await db.insert(agentRuns).values(values).returning().execute();
- return agentRun as AgentRun;
+ return this.mapToAgentRun(agentRun);
}
async findById(id: string): Promise {
const db = await this.getDbInstance();
const [agentRun] = await db.select().from(agentRuns).where(eq(agentRuns.id, id)).execute();
- return agentRun as AgentRun | undefined;
+ return agentRun ? this.mapToAgentRun(agentRun) : undefined;
}
async findByWorkItemId(workItemId: string): Promise {
@@ -72,7 +91,46 @@ export class AgentRunsRepository {
.where(eq(agentRuns.workItemId, workItemId))
.execute();
- return result as AgentRun[];
+ return result.map((r) => this.mapToAgentRun(r));
+ }
+
+ private mapToAgentRun(row: any): AgentRun {
+ return {
+ id: row.id,
+ projectId: row.projectId,
+ workItemId: row.workItemId,
+ taskId: row.taskId || null,
+ agentKey: row.agentKey,
+ status: row.status,
+ inputSummary: row.inputSummary,
+ inputJson: row.inputJson,
+ sessionId: row.sessionId,
+ linkedAgentRunId: row.linkedAgentRunId,
+ log: row.log,
+ logPath: row.logPath,
+ stdoutPath: row.stdoutPath,
+ stderrPath: row.stderrPath,
+ headShaBefore: row.headShaBefore,
+ headShaAfter: row.headShaAfter,
+ commitSha: row.commitSha,
+ pid: row.pid,
+ idempotencyKey: row.idempotencyKey || null,
+ nodeRunId: row.nodeRunId || null,
+ startedAt:
+ row.startedAt instanceof Date
+ ? row.startedAt
+ : row.startedAt
+ ? new Date(row.startedAt * 1000)
+ : null,
+ finishedAt:
+ row.finishedAt instanceof Date
+ ? row.finishedAt
+ : row.finishedAt
+ ? new Date(row.finishedAt * 1000)
+ : null,
+ createdAt: row.createdAt instanceof Date ? row.createdAt : new Date(row.createdAt * 1000),
+ updatedAt: row.updatedAt instanceof Date ? row.updatedAt : new Date(row.updatedAt * 1000),
+ };
}
async update(
@@ -90,7 +148,7 @@ export class AgentRunsRepository {
.returning()
.execute();
- return agentRun as AgentRun | undefined;
+ return agentRun ? this.mapToAgentRun(agentRun) : undefined;
}
}
diff --git a/backend/src/repositories/GitOpsRepository.ts b/backend/src/repositories/GitOpsRepository.ts
new file mode 100644
index 0000000..1f3ddfb
--- /dev/null
+++ b/backend/src/repositories/GitOpsRepository.ts
@@ -0,0 +1,131 @@
+import { eq } from 'drizzle-orm';
+import { gitOps } from '../models/schema.js';
+import type { GitOp } from '../types/models.js';
+import { getDb } from '../db/client.js';
+
+export class GitOpsRepository {
+ private db: Awaited> | null = null;
+
+ private async getDbInstance() {
+ if (!this.db) {
+ this.db = await getDb();
+ }
+ return this.db;
+ }
+
+ async create(data: {
+ id: string;
+ workItemId: string;
+ operation: string;
+ status?: GitOp['status'];
+ input?: Record;
+ output?: Record;
+ idempotencyKey?: string | null;
+ nodeRunId?: string | null;
+ }): Promise {
+ const db = await this.getDbInstance();
+ const [gitOp] = await db
+ .insert(gitOps)
+ .values({
+ id: data.id,
+ workItemId: data.workItemId,
+ operation: data.operation,
+ status: data.status || 'pending',
+ input: JSON.stringify(data.input || {}),
+ output: JSON.stringify(data.output || {}),
+ idempotencyKey: data.idempotencyKey || null,
+ nodeRunId: data.nodeRunId || null,
+ })
+ .returning()
+ .execute();
+
+ return this.mapToGitOp(gitOp);
+ }
+
+ async findById(id: string): Promise {
+ const db = await this.getDbInstance();
+ const [gitOp] = await db.select().from(gitOps).where(eq(gitOps.id, id)).execute();
+
+ return gitOp ? this.mapToGitOp(gitOp) : undefined;
+ }
+
+ async findByWorkItemId(workItemId: string): Promise {
+ const db = await this.getDbInstance();
+ const result = await db
+ .select()
+ .from(gitOps)
+ .where(eq(gitOps.workItemId, workItemId))
+ .execute();
+
+ return result.map((g) => this.mapToGitOp(g));
+ }
+
+ async findByIdempotencyKey(idempotencyKey: string): Promise {
+ const db = await this.getDbInstance();
+ const [gitOp] = await db
+ .select()
+ .from(gitOps)
+ .where(eq(gitOps.idempotencyKey, idempotencyKey))
+ .execute();
+
+ return gitOp ? this.mapToGitOp(gitOp) : undefined;
+ }
+
+ async update(
+ id: string,
+ data: Partial<{
+ operation: string;
+ status: GitOp['status'];
+ input: Record;
+ output: Record;
+ }>
+ ): Promise {
+ const db = await this.getDbInstance();
+ const updateData: Record = {
+ updatedAt: new Date(),
+ };
+
+ if (data.operation !== undefined) {
+ updateData.operation = data.operation;
+ }
+ if (data.status !== undefined) {
+ updateData.status = data.status;
+ }
+ if (data.input !== undefined) {
+ updateData.input = JSON.stringify(data.input);
+ }
+ if (data.output !== undefined) {
+ updateData.output = JSON.stringify(data.output);
+ }
+
+ const [gitOp] = await db
+ .update(gitOps)
+ .set(updateData)
+ .where(eq(gitOps.id, id))
+ .returning()
+ .execute();
+
+ return gitOp ? this.mapToGitOp(gitOp) : undefined;
+ }
+
+ async updateStatus(id: string, status: GitOp['status']): Promise {
+ return this.update(id, { status });
+ }
+
+ private mapToGitOp(row: any): GitOp {
+ return {
+ id: row.id,
+ workItemId: row.workItemId,
+ operation: row.operation,
+ status: row.status,
+ input: typeof row.input === 'string' ? JSON.parse(row.input) : row.input || {},
+ output: typeof row.output === 'string' ? JSON.parse(row.output) : row.output || {},
+ idempotencyKey: row.idempotencyKey,
+ nodeRunId: row.nodeRunId,
+ createdAt: row.createdAt instanceof Date ? row.createdAt : new Date(row.createdAt * 1000),
+ updatedAt: row.updatedAt instanceof Date ? row.updatedAt : new Date(row.updatedAt * 1000),
+ };
+ }
+}
+
+export const gitOpsRepository = new GitOpsRepository();
diff --git a/backend/src/repositories/ProjectsRepository.ts b/backend/src/repositories/ProjectsRepository.ts
index 0267820..c7b0ac9 100644
--- a/backend/src/repositories/ProjectsRepository.ts
+++ b/backend/src/repositories/ProjectsRepository.ts
@@ -1,4 +1,4 @@
-import { eq } from 'drizzle-orm';
+import { eq, desc } from 'drizzle-orm';
import { projects } from '../models/schema.js';
import type { Project } from '../types/models.js';
import { getDb } from '../db/client.js';
@@ -18,6 +18,7 @@ export class ProjectsRepository {
name: string;
sourceRepoPath: string;
sourceRepoUrl?: string;
+ mirrorRepoPath: string;
relayRepoPath: string;
defaultBranch: string;
defaultAgent?: string;
@@ -31,6 +32,7 @@ export class ProjectsRepository {
name: data.name,
sourceRepoPath: data.sourceRepoPath,
sourceRepoUrl: data.sourceRepoUrl || null,
+ mirrorRepoPath: data.mirrorRepoPath,
relayRepoPath: data.relayRepoPath,
defaultBranch: data.defaultBranch,
defaultAgent: data.defaultAgent || 'opencode',
@@ -44,7 +46,7 @@ export class ProjectsRepository {
async findAll(): Promise {
const db = await this.getDbInstance();
- const result = await db.select().from(projects).execute();
+ const result = await db.select().from(projects).orderBy(desc(projects.createdAt)).execute();
return result as Project[];
}
diff --git a/backend/src/repositories/PullRequestsRepository.ts b/backend/src/repositories/PullRequestsRepository.ts
index b749478..4124976 100644
--- a/backend/src/repositories/PullRequestsRepository.ts
+++ b/backend/src/repositories/PullRequestsRepository.ts
@@ -1,4 +1,5 @@
-import { eq } from 'drizzle-orm';
+import { eq, desc } from 'drizzle-orm';
+import { PR_STATUS_OPEN, PR_STATUS_MERGED, PR_STATUS_CLOSED } from 'git-vibe-shared';
import { pullRequests } from '../models/schema.js';
import type { PullRequest } from '../types/models.js';
import { getDb } from '../db/client.js';
@@ -33,7 +34,7 @@ export class PullRequestsRepository {
workItemId: data.workItemId,
title: data.title,
description: data.description || null,
- status: data.status || 'open',
+ status: data.status || PR_STATUS_OPEN,
sourceBranch: data.sourceBranch,
targetBranch: data.targetBranch,
mergeStrategy: data.mergeStrategy || 'merge',
@@ -62,12 +63,23 @@ export class PullRequestsRepository {
return pr as PullRequest | undefined;
}
+ async findAll(): Promise {
+ const db = await this.getDbInstance();
+ const result = await db
+ .select()
+ .from(pullRequests)
+ .orderBy(desc(pullRequests.createdAt))
+ .execute();
+ return result as PullRequest[];
+ }
+
async findByProjectId(projectId: string): Promise {
const db = await this.getDbInstance();
const result = await db
.select()
.from(pullRequests)
.where(eq(pullRequests.projectId, projectId))
+ .orderBy(desc(pullRequests.createdAt))
.execute();
return result as PullRequest[];
@@ -119,11 +131,11 @@ export class PullRequestsRepository {
}
// Check if PR is already merged or closed
- if (pr.status === 'merged') {
+ if (pr.status === PR_STATUS_MERGED) {
return { canMerge: false, reason: 'Pull request is already merged' };
}
- if (pr.status === 'closed') {
+ if (pr.status === PR_STATUS_CLOSED) {
return { canMerge: false, reason: 'Pull request is closed' };
}
diff --git a/backend/src/repositories/SettingsRepository.ts b/backend/src/repositories/SettingsRepository.ts
new file mode 100644
index 0000000..1736e36
--- /dev/null
+++ b/backend/src/repositories/SettingsRepository.ts
@@ -0,0 +1,53 @@
+import { eq } from 'drizzle-orm';
+import { appSettings } from '../models/schema.js';
+import { getDb } from '../db/client.js';
+
+const DEFAULT_AGENT = 'opencode';
+const DEFAULT_AGENT_PARAMS = '{}';
+
+export interface GlobalSettings {
+ defaultAgent: string;
+ defaultAgentParams: string; // JSON string
+}
+
+export class SettingsRepository {
+ private db: Awaited> | null = null;
+
+ private async getDbInstance() {
+ if (!this.db) {
+ this.db = await getDb();
+ }
+ return this.db;
+ }
+
+ async getGlobalSettings(): Promise {
+ const db = await this.getDbInstance();
+ const rows = await db.select().from(appSettings).execute();
+ const map = new Map(rows.map((r) => [r.key, r.value]));
+ return {
+ defaultAgent: map.get('defaultAgent') ?? DEFAULT_AGENT,
+ defaultAgentParams: map.get('defaultAgentParams') ?? DEFAULT_AGENT_PARAMS,
+ };
+ }
+
+ async updateGlobalSettings(updates: Partial): Promise {
+ const db = await this.getDbInstance();
+ if (updates.defaultAgent !== undefined) {
+ await db.delete(appSettings).where(eq(appSettings.key, 'defaultAgent')).execute();
+ await db
+ .insert(appSettings)
+ .values({ key: 'defaultAgent', value: updates.defaultAgent })
+ .execute();
+ }
+ if (updates.defaultAgentParams !== undefined) {
+ await db.delete(appSettings).where(eq(appSettings.key, 'defaultAgentParams')).execute();
+ await db
+ .insert(appSettings)
+ .values({ key: 'defaultAgentParams', value: updates.defaultAgentParams })
+ .execute();
+ }
+ return this.getGlobalSettings();
+ }
+}
+
+export const settingsRepository = new SettingsRepository();
diff --git a/backend/src/repositories/TargetReposRepository.ts b/backend/src/repositories/TargetReposRepository.ts
deleted file mode 100644
index 243fa4d..0000000
--- a/backend/src/repositories/TargetReposRepository.ts
+++ /dev/null
@@ -1,60 +0,0 @@
-import { eq } from 'drizzle-orm';
-import { targetRepos } from '../models/schema.js';
-import type { TargetRepo } from '../types/models.js';
-import { getDb } from '../db/client.js';
-
-export class TargetReposRepository {
- private db: Awaited> | null = null;
-
- private async getDbInstance() {
- if (!this.db) {
- this.db = await getDb();
- }
- return this.db;
- }
-
- async create(data: {
- id: string;
- name: string;
- repoPath: string;
- defaultBranch: string;
- }): Promise {
- const db = await this.getDbInstance();
- const [targetRepo] = await db
- .insert(targetRepos)
- .values({
- id: data.id,
- name: data.name,
- repoPath: data.repoPath,
- defaultBranch: data.defaultBranch,
- })
- .returning()
- .execute();
-
- return targetRepo as TargetRepo;
- }
-
- async findAll(): Promise {
- const db = await this.getDbInstance();
- const result = await db.select().from(targetRepos).execute();
- return result as TargetRepo[];
- }
-
- async findById(id: string): Promise {
- const db = await this.getDbInstance();
- const [targetRepo] = await db
- .select()
- .from(targetRepos)
- .where(eq(targetRepos.id, id))
- .execute();
-
- return targetRepo as TargetRepo | undefined;
- }
-
- async delete(id: string): Promise {
- const db = await this.getDbInstance();
- await db.delete(targetRepos).where(eq(targetRepos.id, id)).execute();
- }
-}
-
-export const targetReposRepository = new TargetReposRepository();
diff --git a/backend/src/repositories/TasksRepository.ts b/backend/src/repositories/TasksRepository.ts
new file mode 100644
index 0000000..37b77e4
--- /dev/null
+++ b/backend/src/repositories/TasksRepository.ts
@@ -0,0 +1,141 @@
+import { eq, and } from 'drizzle-orm';
+import { tasks } from '../models/schema.js';
+import type { Task } from '../types/models.js';
+import { getDb } from '../db/client.js';
+
+export class TasksRepository {
+ private db: Awaited> | null = null;
+
+ private async getDbInstance() {
+ if (!this.db) {
+ this.db = await getDb();
+ }
+ return this.db;
+ }
+
+ async create(data: {
+ id: string;
+ workItemId: string;
+ taskType: string;
+ status?: Task['status'];
+ input?: Record;
+ output?: Record;
+ currentAgentRunId?: string | null;
+ idempotencyKey?: string | null;
+ nodeRunId?: string | null;
+ }): Promise {
+ const db = await this.getDbInstance();
+ const [task] = await db
+ .insert(tasks)
+ .values({
+ id: data.id,
+ workItemId: data.workItemId,
+ taskType: data.taskType,
+ status: data.status || 'pending',
+ input: JSON.stringify(data.input || {}),
+ output: JSON.stringify(data.output || {}),
+ currentAgentRunId: data.currentAgentRunId || null,
+ idempotencyKey: data.idempotencyKey || null,
+ nodeRunId: data.nodeRunId || null,
+ })
+ .returning()
+ .execute();
+
+ return this.mapToTask(task);
+ }
+
+ async findById(id: string): Promise {
+ const db = await this.getDbInstance();
+ const [task] = await db.select().from(tasks).where(eq(tasks.id, id)).execute();
+
+ return task ? this.mapToTask(task) : undefined;
+ }
+
+ async findByWorkItemId(workItemId: string): Promise {
+ const db = await this.getDbInstance();
+ const result = await db.select().from(tasks).where(eq(tasks.workItemId, workItemId)).execute();
+
+ return result.map((t) => this.mapToTask(t));
+ }
+
+ async findByTaskType(workItemId: string, taskType: string): Promise {
+ const db = await this.getDbInstance();
+ const [task] = await db
+ .select()
+ .from(tasks)
+ .where(and(eq(tasks.workItemId, workItemId), eq(tasks.taskType, taskType)))
+ .execute();
+
+ return task ? this.mapToTask(task) : undefined;
+ }
+
+ async findByIdempotencyKey(idempotencyKey: string): Promise {
+ const db = await this.getDbInstance();
+ const [task] = await db
+ .select()
+ .from(tasks)
+ .where(eq(tasks.idempotencyKey, idempotencyKey))
+ .execute();
+
+ return task ? this.mapToTask(task) : undefined;
+ }
+
+ async update(
+ id: string,
+ data: Partial<{
+ status: Task['status'];
+ input: Record;
+ output: Record;
+ currentAgentRunId: string | null;
+ }>
+ ): Promise {
+ const db = await this.getDbInstance();
+ const updateData: Record = {
+ updatedAt: new Date(),
+ };
+
+ if (data.status !== undefined) {
+ updateData.status = data.status;
+ }
+ if (data.input !== undefined) {
+ updateData.input = JSON.stringify(data.input);
+ }
+ if (data.output !== undefined) {
+ updateData.output = JSON.stringify(data.output);
+ }
+ if (data.currentAgentRunId !== undefined) {
+ updateData.currentAgentRunId = data.currentAgentRunId;
+ }
+
+ const [task] = await db
+ .update(tasks)
+ .set(updateData)
+ .where(eq(tasks.id, id))
+ .returning()
+ .execute();
+
+ return task ? this.mapToTask(task) : undefined;
+ }
+
+ async updateStatus(id: string, status: Task['status']): Promise {
+ return this.update(id, { status });
+ }
+
+ private mapToTask(row: any): Task {
+ return {
+ id: row.id,
+ workItemId: row.workItemId,
+ taskType: row.taskType,
+ status: row.status,
+ input: typeof row.input === 'string' ? JSON.parse(row.input) : row.input || {},
+ output: typeof row.output === 'string' ? JSON.parse(row.output) : row.output || {},
+ currentAgentRunId: row.currentAgentRunId,
+ idempotencyKey: row.idempotencyKey,
+ nodeRunId: row.nodeRunId,
+ createdAt: row.createdAt instanceof Date ? row.createdAt : new Date(row.createdAt * 1000),
+ updatedAt: row.updatedAt instanceof Date ? row.updatedAt : new Date(row.updatedAt * 1000),
+ };
+ }
+}
+
+export const tasksRepository = new TasksRepository();
diff --git a/backend/src/repositories/WorkItemsRepository.ts b/backend/src/repositories/WorkItemsRepository.ts
index 14665b7..a095933 100644
--- a/backend/src/repositories/WorkItemsRepository.ts
+++ b/backend/src/repositories/WorkItemsRepository.ts
@@ -1,7 +1,8 @@
-import { eq } from 'drizzle-orm';
+import { eq, isNotNull, desc } from 'drizzle-orm';
import { workItems } from '../models/schema.js';
import type { WorkItem } from '../types/models.js';
import { getDb } from '../db/client.js';
+import { agentRunsRepository } from './AgentRunsRepository.js';
export class WorkItemsRepository {
private db: Awaited> | null = null;
@@ -28,7 +29,7 @@ export class WorkItemsRepository {
headSha?: string;
}): Promise {
const db = await this.getDbInstance();
- const [workItem] = await db
+ const result = await db
.insert(workItems)
.values({
id: data.id,
@@ -46,12 +47,13 @@ export class WorkItemsRepository {
.returning()
.execute();
+ const [workItem] = Array.isArray(result) ? result : [result];
return workItem as WorkItem;
}
async findAll(): Promise {
const db = await this.getDbInstance();
- const result = await db.select().from(workItems).execute();
+ const result = await db.select().from(workItems).orderBy(desc(workItems.createdAt)).execute();
return result as WorkItem[];
}
@@ -61,6 +63,7 @@ export class WorkItemsRepository {
.select()
.from(workItems)
.where(eq(workItems.projectId, projectId))
+ .orderBy(desc(workItems.createdAt))
.execute();
return result as WorkItem[];
}
@@ -88,16 +91,40 @@ export class WorkItemsRepository {
}
): Promise {
const db = await this.getDbInstance();
- const [workItem] = await db
+
+ // Check if work item exists
+ const [existing] = await db.select().from(workItems).where(eq(workItems.id, id)).execute();
+
+ if (!existing) {
+ return undefined;
+ }
+
+ // Filter out undefined/null values to avoid Drizzle ORM errors
+ const updateFields: Record = {
+ updatedAt: new Date(),
+ };
+
+ // Only include fields that are actually provided and not undefined/null
+ for (const [key, value] of Object.entries(data)) {
+ if (value !== undefined && value !== null) {
+ updateFields[key] = value;
+ }
+ }
+
+ // If no fields to update (only updatedAt), return existing work item
+ if (Object.keys(updateFields).length === 1) {
+ return existing as WorkItem;
+ }
+
+ const result = await db
.update(workItems)
- .set({
- ...data,
- updatedAt: new Date(),
- })
+ .set(updateFields)
.where(eq(workItems.id, id))
.returning()
.execute();
+ const [workItem] = Array.isArray(result) ? result : [result];
+
return workItem as WorkItem | undefined;
}
@@ -130,9 +157,29 @@ export class WorkItemsRepository {
const isExpired = existing.lockExpiresAt ? new Date(existing.lockExpiresAt) < now : true;
const isOwned = existing.lockOwnerRunId === runId;
+ // If locked by another run and not expired, check if that run is still active
if (existing.lockOwnerRunId && !isExpired && !isOwned) {
- // Locked by another run and not expired
- return false;
+ // Check if the lock owner run is still active
+ const isStale = await this.isLockStale(existing.lockOwnerRunId);
+ if (isStale) {
+ // Lock owner run is no longer active, release the stale lock
+ console.log(
+ `[WorkItemsRepository] Releasing stale lock on workItem ${workItemId} owned by inactive run ${existing.lockOwnerRunId}`
+ );
+ await db
+ .update(workItems)
+ .set({
+ lockOwnerRunId: null,
+ lockExpiresAt: null,
+ updatedAt: now,
+ })
+ .where(eq(workItems.id, workItemId))
+ .execute();
+ // Continue to acquire the lock
+ } else {
+ // Locked by another active run and not expired
+ return false;
+ }
}
// Acquire lock
@@ -149,6 +196,22 @@ export class WorkItemsRepository {
return true;
}
+ /**
+ * Check if a lock is stale (i.e., the owner run is no longer active)
+ */
+ private async isLockStale(ownerRunId: string): Promise {
+ const agentRun = await agentRunsRepository.findById(ownerRunId);
+
+ // If run doesn't exist, lock is stale
+ if (!agentRun) {
+ return true;
+ }
+
+ // If run is completed, failed, or cancelled, lock is stale
+ const activeStatuses = ['queued', 'running'];
+ return !activeStatuses.includes(agentRun.status);
+ }
+
async releaseLock(workItemId: string, runId: string): Promise {
const db = await this.getDbInstance();
const now = new Date();
@@ -213,6 +276,23 @@ export class WorkItemsRepository {
return { locked: false };
}
+ // Check if lock is stale (owner run is no longer active)
+ const isStale = await this.isLockStale(workItem.lockOwnerRunId);
+ if (isStale) {
+ // Lock is stale, clear it
+ await db
+ .update(workItems)
+ .set({
+ lockOwnerRunId: null,
+ lockExpiresAt: null,
+ updatedAt: now,
+ })
+ .where(eq(workItems.id, workItemId))
+ .execute();
+
+ return { locked: false };
+ }
+
return {
locked: true,
ownerRunId: workItem.lockOwnerRunId,
@@ -225,6 +305,80 @@ export class WorkItemsRepository {
// The PR is accessed via pullRequestsRepository.findByWorkItemId()
return this.findById(workItemId);
}
+
+ /**
+ * Release all stale locks (locks owned by runs that are no longer active)
+ * This should be called on service startup to clean up locks from crashed services
+ */
+ async releaseStaleLocks(): Promise {
+ const db = await this.getDbInstance();
+ const now = new Date();
+
+ // Find all locked work items (those with a non-null lockOwnerRunId)
+ const lockedWorkItems = await db
+ .select()
+ .from(workItems)
+ .where(isNotNull(workItems.lockOwnerRunId))
+ .execute();
+
+ let releasedCount = 0;
+
+ for (const workItem of lockedWorkItems) {
+ if (!workItem.lockOwnerRunId) {
+ continue;
+ }
+
+ // Check if lock is expired
+ const isExpired = workItem.lockExpiresAt ? new Date(workItem.lockExpiresAt) < now : true;
+ if (isExpired) {
+ // Lock is expired, release it
+ await db
+ .update(workItems)
+ .set({
+ lockOwnerRunId: null,
+ lockExpiresAt: null,
+ updatedAt: now,
+ })
+ .where(eq(workItems.id, workItem.id))
+ .execute();
+ releasedCount++;
+ console.log(
+ `[WorkItemsRepository] Released expired lock on workItem ${workItem.id} (expired at ${workItem.lockExpiresAt})`
+ );
+ continue;
+ }
+
+ // Check if the lock owner run is still active
+ const agentRun = await agentRunsRepository.findById(workItem.lockOwnerRunId);
+ const activeStatuses = ['queued', 'running'];
+ const isStale = !agentRun || !activeStatuses.includes(agentRun.status);
+
+ if (isStale) {
+ // Lock owner run is no longer active, release the stale lock
+ await db
+ .update(workItems)
+ .set({
+ lockOwnerRunId: null,
+ lockExpiresAt: null,
+ updatedAt: now,
+ })
+ .where(eq(workItems.id, workItem.id))
+ .execute();
+ releasedCount++;
+ console.log(
+ `[WorkItemsRepository] Released stale lock on workItem ${workItem.id} owned by run ${workItem.lockOwnerRunId} (status: ${agentRun?.status || 'not found'})`
+ );
+ }
+ }
+
+ if (releasedCount > 0) {
+ console.log(
+ `[WorkItemsRepository] Released ${releasedCount} stale lock(s) on service startup`
+ );
+ }
+
+ return releasedCount;
+ }
}
export const workItemsRepository = new WorkItemsRepository();
diff --git a/backend/src/repositories/WorkflowsRepository.ts b/backend/src/repositories/WorkflowsRepository.ts
new file mode 100644
index 0000000..54a051d
--- /dev/null
+++ b/backend/src/repositories/WorkflowsRepository.ts
@@ -0,0 +1,331 @@
+import { eq, and, desc } from 'drizzle-orm';
+import { workflows, workflowRuns, nodeRuns } from '../models/schema.js';
+import type { Workflow } from '../types/models.js';
+import { getDb } from '../db/client.js';
+
+export interface WorkflowRecord {
+ id: string;
+ projectId: string;
+ name: string;
+ definition: string;
+ isDefault: boolean;
+ version: number;
+ createdAt: Date;
+ updatedAt: Date;
+}
+
+export interface WorkflowRunRecord {
+ id: string;
+ workflowId: string;
+ workItemId: string;
+ status: 'pending' | 'running' | 'succeeded' | 'failed' | 'blocked' | 'skipped';
+ currentStepId: string | null;
+ startedAt: Date | null;
+ finishedAt: Date | null;
+ createdAt: Date;
+}
+
+export interface NodeRunRecord {
+ id: string;
+ runId: string;
+ workflowRunId: string;
+ nodeId: string;
+ resourceType: string;
+ subjectKind: string;
+ subjectId: string;
+ subjectVersionAtStart: number;
+ status: 'pending' | 'running' | 'succeeded' | 'failed' | 'canceled' | 'blocked';
+ attempt: number;
+ idempotencyKey: string | null;
+ input: string;
+ output: string;
+ error: string | null;
+ startedAt: Date | null;
+ finishedAt: Date | null;
+ createdAt: Date;
+}
+
+export class WorkflowsRepository {
+ private db: Awaited> | null = null;
+
+ private async getDbInstance() {
+ if (!this.db) {
+ this.db = await getDb();
+ }
+ return this.db;
+ }
+
+ async create(data: {
+ id: string;
+ projectId: string;
+ name: string;
+ definition: Workflow;
+ isDefault?: boolean;
+ version?: number;
+ }): Promise {
+ const db = await this.getDbInstance();
+ const result = await db
+ .insert(workflows)
+ .values({
+ id: data.id,
+ projectId: data.projectId,
+ name: data.name,
+ definition: JSON.stringify(data.definition),
+ isDefault: data.isDefault ?? false,
+ version: data.version ?? data.definition.version ?? 1,
+ })
+ .returning()
+ .execute();
+
+ const [workflow] = Array.isArray(result) ? result : [result];
+
+ return workflow as WorkflowRecord;
+ }
+
+ async findAll(projectId?: string): Promise {
+ const db = await this.getDbInstance();
+ let query = db.select().from(workflows);
+ if (projectId) {
+ query = query.where(eq(workflows.projectId, projectId)) as typeof query;
+ }
+ query = query.orderBy(desc(workflows.createdAt)) as typeof query;
+ const result = await query.execute();
+ return result as WorkflowRecord[];
+ }
+
+ async findById(id: string): Promise {
+ const db = await this.getDbInstance();
+ const result = await db.select().from(workflows).where(eq(workflows.id, id)).execute();
+
+ return result[0] as WorkflowRecord | undefined;
+ }
+
+ async findDefault(projectId: string): Promise {
+ const db = await this.getDbInstance();
+ const result = await db
+ .select()
+ .from(workflows)
+ .where(and(eq(workflows.projectId, projectId), eq(workflows.isDefault, true)))
+ .execute();
+
+ return result[0] as WorkflowRecord | undefined;
+ }
+
+ async findByProjectId(projectId: string): Promise {
+ const db = await this.getDbInstance();
+ const result = await db
+ .select()
+ .from(workflows)
+ .where(eq(workflows.projectId, projectId))
+ .orderBy(desc(workflows.createdAt))
+ .execute();
+ return result as WorkflowRecord[];
+ }
+
+ async findByName(name: string, projectId: string): Promise {
+ const db = await this.getDbInstance();
+ const result = await db
+ .select()
+ .from(workflows)
+ .where(and(eq(workflows.projectId, projectId), eq(workflows.name, name)))
+ .execute();
+
+ return result[0] as WorkflowRecord | undefined;
+ }
+
+ async update(
+ id: string,
+ data: {
+ name?: string;
+ definition?: Workflow;
+ isDefault?: boolean;
+ version?: number;
+ }
+ ): Promise {
+ const db = await this.getDbInstance();
+ const result = await db
+ .update(workflows)
+ .set({
+ name: data.name,
+ definition: data.definition ? JSON.stringify(data.definition) : undefined,
+ isDefault: data.isDefault,
+ version: data.version,
+ updatedAt: new Date(),
+ })
+ .where(eq(workflows.id, id))
+ .returning()
+ .execute();
+
+ const [workflow] = Array.isArray(result) ? result : [result];
+
+ return workflow as WorkflowRecord | undefined;
+ }
+
+ async delete(id: string): Promise {
+ const db = await this.getDbInstance();
+ await db.delete(workflows).where(eq(workflows.id, id)).execute();
+ }
+
+ async createRun(data: {
+ id: string;
+ workflowId: string;
+ workItemId: string;
+ }): Promise {
+ const db = await this.getDbInstance();
+ const result = await db
+ .insert(workflowRuns)
+ .values({
+ id: data.id,
+ workflowId: data.workflowId,
+ workItemId: data.workItemId,
+ status: 'pending',
+ })
+ .returning()
+ .execute();
+
+ const [run] = Array.isArray(result) ? result : [result];
+
+ return run as WorkflowRunRecord;
+ }
+
+ async findAllRuns(workItemId?: string, workflowId?: string): Promise {
+ const db = await this.getDbInstance();
+ let query = db.select().from(workflowRuns);
+
+ const conditions = [];
+ if (workItemId) {
+ conditions.push(eq(workflowRuns.workItemId, workItemId));
+ }
+ if (workflowId) {
+ conditions.push(eq(workflowRuns.workflowId, workflowId));
+ }
+
+ if (conditions.length > 0) {
+ const condition = conditions.length === 1 ? conditions[0]! : and(...conditions);
+ query = query.where(condition) as typeof query;
+ }
+
+ return (await query.execute()) as WorkflowRunRecord[];
+ }
+
+ async findRunById(id: string): Promise {
+ const db = await this.getDbInstance();
+ const result = await db.select().from(workflowRuns).where(eq(workflowRuns.id, id)).execute();
+
+ return result[0] as WorkflowRunRecord | undefined;
+ }
+
+ async updateRun(
+ id: string,
+ data: {
+ status?: 'pending' | 'running' | 'succeeded' | 'failed' | 'blocked' | 'skipped';
+ currentStepId?: string | null;
+ startedAt?: Date | null;
+ finishedAt?: Date | null;
+ }
+ ): Promise {
+ const db = await this.getDbInstance();
+ const result = await db
+ .update(workflowRuns)
+ .set({
+ ...data,
+ })
+ .where(eq(workflowRuns.id, id))
+ .returning()
+ .execute();
+
+ const [run] = Array.isArray(result) ? result : [result];
+
+ return run as WorkflowRunRecord | undefined;
+ }
+
+ async deleteRun(id: string): Promise {
+ const db = await this.getDbInstance();
+ await db.delete(workflowRuns).where(eq(workflowRuns.id, id)).execute();
+ }
+
+ async createNodeRun(data: {
+ id: string;
+ runId: string;
+ workflowRunId: string;
+ nodeId: string;
+ resourceType: string;
+ subjectKind: string;
+ subjectId: string;
+ subjectVersionAtStart: number;
+ input: Record;
+ }): Promise {
+ const db = await this.getDbInstance();
+ await db.insert(nodeRuns).values({
+ id: data.id,
+ runId: data.runId,
+ workflowRunId: data.workflowRunId,
+ nodeId: data.nodeId,
+ resourceType: data.resourceType,
+ subjectKind: data.subjectKind,
+ subjectId: data.subjectId,
+ subjectVersionAtStart: data.subjectVersionAtStart,
+ status: 'pending',
+ input: JSON.stringify(data.input),
+ output: '{}',
+ });
+
+ // Fetch created record
+ const result = await db.select().from(nodeRuns).where(eq(nodeRuns.id, data.id)).execute();
+
+ return result[0] as NodeRunRecord;
+ }
+
+ async findNodeRunsByWorkflowRunId(workflowRunId: string): Promise {
+ const db = await this.getDbInstance();
+ const result = await db
+ .select()
+ .from(nodeRuns)
+ .where(eq(nodeRuns.workflowRunId, workflowRunId))
+ .execute();
+ return result as NodeRunRecord[];
+ }
+
+ async findNodeRunById(id: string): Promise {
+ const db = await this.getDbInstance();
+ const result = await db.select().from(nodeRuns).where(eq(nodeRuns.id, id)).execute();
+
+ return result[0] as NodeRunRecord | undefined;
+ }
+
+ async updateNodeRun(
+ id: string,
+ data: {
+ status?: 'pending' | 'running' | 'succeeded' | 'failed' | 'canceled' | 'blocked';
+ startedAt?: Date | null;
+ finishedAt?: Date | null;
+ error?: string | null;
+ output?: Record;
+ }
+ ): Promise {
+ const db = await this.getDbInstance();
+ await db
+ .update(nodeRuns)
+ .set({
+ status: data.status,
+ startedAt: data.startedAt,
+ finishedAt: data.finishedAt,
+ error: data.error,
+ output: data.output ? JSON.stringify(data.output) : undefined,
+ })
+ .where(eq(nodeRuns.id, id))
+ .execute();
+
+ // Fetch updated record
+ const result = await db.select().from(nodeRuns).where(eq(nodeRuns.id, id)).execute();
+
+ return result[0] as NodeRunRecord | undefined;
+ }
+
+ async deleteNodeRun(id: string): Promise {
+ const db = await this.getDbInstance();
+ await db.delete(nodeRuns).where(eq(nodeRuns.id, id)).execute();
+ }
+}
+
+export const workflowsRepository = new WorkflowsRepository();
diff --git a/backend/src/repositories/WorktreesRepository.ts b/backend/src/repositories/WorktreesRepository.ts
new file mode 100644
index 0000000..86b7fb4
--- /dev/null
+++ b/backend/src/repositories/WorktreesRepository.ts
@@ -0,0 +1,131 @@
+import { eq } from 'drizzle-orm';
+import { worktrees } from '../models/schema.js';
+import type { Worktree } from '../types/models.js';
+import { getDb } from '../db/client.js';
+
+export class WorktreesRepository {
+ private db: Awaited> | null = null;
+
+ private async getDbInstance() {
+ if (!this.db) {
+ this.db = await getDb();
+ }
+ return this.db;
+ }
+
+ async create(data: {
+ id: string;
+ workItemId: string;
+ path: string;
+ branch: string;
+ repoSha?: string | null;
+ status?: Worktree['status'];
+ idempotencyKey?: string | null;
+ nodeRunId?: string | null;
+ }): Promise {
+ const db = await this.getDbInstance();
+ const [worktree] = await db
+ .insert(worktrees)
+ .values({
+ id: data.id,
+ workItemId: data.workItemId,
+ path: data.path,
+ branch: data.branch,
+ repoSha: data.repoSha || null,
+ status: data.status || 'pending',
+ idempotencyKey: data.idempotencyKey || null,
+ nodeRunId: data.nodeRunId || null,
+ })
+ .returning()
+ .execute();
+
+ return this.mapToWorktree(worktree);
+ }
+
+ async findById(id: string): Promise {
+ const db = await this.getDbInstance();
+ const [worktree] = await db.select().from(worktrees).where(eq(worktrees.id, id)).execute();
+
+ return worktree ? this.mapToWorktree(worktree) : undefined;
+ }
+
+ async findByWorkItemId(workItemId: string): Promise {
+ const db = await this.getDbInstance();
+ const [worktree] = await db
+ .select()
+ .from(worktrees)
+ .where(eq(worktrees.workItemId, workItemId))
+ .execute();
+
+ return worktree ? this.mapToWorktree(worktree) : undefined;
+ }
+
+ async findByIdempotencyKey(idempotencyKey: string): Promise {
+ const db = await this.getDbInstance();
+ const [worktree] = await db
+ .select()
+ .from(worktrees)
+ .where(eq(worktrees.idempotencyKey, idempotencyKey))
+ .execute();
+
+ return worktree ? this.mapToWorktree(worktree) : undefined;
+ }
+
+ async update(
+ id: string,
+ data: Partial<{
+ path: string;
+ branch: string;
+ repoSha: string | null;
+ status: Worktree['status'];
+ }>
+ ): Promise {
+ const db = await this.getDbInstance();
+ const updateData: Record = {
+ updatedAt: new Date(),
+ };
+
+ if (data.path !== undefined) {
+ updateData.path = data.path;
+ }
+ if (data.branch !== undefined) {
+ updateData.branch = data.branch;
+ }
+ if (data.repoSha !== undefined) {
+ updateData.repoSha = data.repoSha;
+ }
+ if (data.status !== undefined) {
+ updateData.status = data.status;
+ }
+
+ const [worktree] = await db
+ .update(worktrees)
+ .set(updateData)
+ .where(eq(worktrees.id, id))
+ .returning()
+ .execute();
+
+ return worktree ? this.mapToWorktree(worktree) : undefined;
+ }
+
+ async updateStatus(id: string, status: Worktree['status']): Promise {
+ return this.update(id, { status });
+ }
+
+ private mapToWorktree(row: any): Worktree {
+ return {
+ id: row.id,
+ workItemId: row.workItemId,
+ path: row.path,
+ branch: row.branch,
+ repoSha: row.repoSha,
+ status: row.status,
+ idempotencyKey: row.idempotencyKey,
+ nodeRunId: row.nodeRunId,
+ createdAt: row.createdAt instanceof Date ? row.createdAt : new Date(row.createdAt * 1000),
+ updatedAt: row.updatedAt instanceof Date ? row.updatedAt : new Date(row.updatedAt * 1000),
+ };
+ }
+}
+
+export const worktreesRepository = new WorktreesRepository();
diff --git a/backend/src/routes/agentRuns.ts b/backend/src/routes/agentRuns.ts
index bc66d83..d6a904e 100644
--- a/backend/src/routes/agentRuns.ts
+++ b/backend/src/routes/agentRuns.ts
@@ -4,11 +4,12 @@ import { TriggerAgentRunDTOSchema, CancelAgentRunResponseSchema } from 'git-vibe
import { agentRunsRepository } from '../repositories/AgentRunsRepository.js';
import { workItemsRepository } from '../repositories/WorkItemsRepository.js';
import { projectsRepository } from '../repositories/ProjectsRepository.js';
-import { agentService } from '../services/AgentService.js';
+import { agentService } from '../services/agent/AgentService.js';
import { promises as fs } from 'node:fs';
import { watch } from 'node:fs';
import path from 'node:path';
import { toDTO as agentRunToDTO } from '../mappers/agentRuns.js';
+import { STORAGE_CONFIG } from '../config/storage.js';
export async function agentRunsRoutes(server: FastifyInstance) {
// POST /api/work-items/:id/agent-runs - Start agent run for a WorkItem
@@ -284,7 +285,7 @@ export async function agentRunsRoutes(server: FastifyInstance) {
stderrPosition = initialStderr.length;
}
}
- } catch (error) {
+ } catch {
// Files might not exist yet
}
@@ -296,7 +297,7 @@ export async function agentRunsRoutes(server: FastifyInstance) {
stdoutPosition = await readAndSendLogs(agentRun.stdoutPath, 'stdout', stdoutPosition);
}
});
- } catch (error) {
+ } catch {
// File might not exist yet, will be created later
}
}
@@ -308,7 +309,7 @@ export async function agentRunsRoutes(server: FastifyInstance) {
stderrPosition = await readAndSendLogs(agentRun.stderrPath, 'stderr', stderrPosition);
}
});
- } catch (error) {
+ } catch {
// File might not exist yet, will be created later
}
}
@@ -320,7 +321,6 @@ export async function agentRunsRoutes(server: FastifyInstance) {
}
// If path not in database yet, try to derive it
try {
- const { STORAGE_CONFIG } = await import('../config/storage.js');
const logsDir = STORAGE_CONFIG.logsDir;
const derivedPath = path.join(logsDir, `agent-run-${request.params.id}-stdout.log`);
const stats = await fs.stat(derivedPath);
@@ -339,7 +339,6 @@ export async function agentRunsRoutes(server: FastifyInstance) {
}
// If path not in database yet, try to derive it
try {
- const { STORAGE_CONFIG } = await import('../config/storage.js');
const logsDir = STORAGE_CONFIG.logsDir;
const derivedPath = path.join(logsDir, `agent-run-${request.params.id}-stderr.log`);
const stats = await fs.stat(derivedPath);
@@ -369,7 +368,7 @@ export async function agentRunsRoutes(server: FastifyInstance) {
const keepAliveInterval = setInterval(() => {
try {
reply.raw.write(': keepalive\n\n');
- } catch (error) {
+ } catch {
// Connection might be closed
}
}, 30000);
@@ -386,7 +385,7 @@ export async function agentRunsRoutes(server: FastifyInstance) {
}
try {
reply.raw.end();
- } catch (error) {
+ } catch {
// Connection might already be closed
}
};
diff --git a/backend/src/routes/projects.ts b/backend/src/routes/projects.ts
index 45fa67d..e45ab87 100644
--- a/backend/src/routes/projects.ts
+++ b/backend/src/routes/projects.ts
@@ -11,18 +11,31 @@ import {
BranchesResponseSchema,
SyncResponseSchema,
DeleteProjectResponseSchema,
+ CreateFileDTOSchema,
+ UpdateFileDTOSchema,
+ CommitChangesDTOSchema,
+ GetOrCreateManualWorkItemDTOSchema,
+ ProjectsListResponseSchema,
+ ProjectStatsSchema,
+ WORKITEM_STATUS_OPEN,
+ PR_STATUS_OPEN,
+ PR_STATUS_MERGED,
} from 'git-vibe-shared';
import { projectsRepository } from '../repositories/ProjectsRepository.js';
+import { settingsRepository } from '../repositories/SettingsRepository.js';
import { workItemsRepository } from '../repositories/WorkItemsRepository.js';
import { pullRequestsRepository } from '../repositories/PullRequestsRepository.js';
-import { gitService } from '../services/GitService.js';
+import { gitService } from '../services/git/GitService.js';
import { modelsCache } from '../services/ModelsCache.js';
+import { workspaceService } from '../services/WorkspaceService.js';
+import { prService } from '../services/PRService.js';
import { STORAGE_CONFIG } from '../config/storage.js';
import { cleanupDirectory } from '../utils/storage.js';
import path from 'node:path';
import fs from 'node:fs/promises';
import { toDTO as projectToDTO } from '../mappers/projects.js';
import { toDTO as workItemToDTO } from '../mappers/workItems.js';
+import { toDTO as pullRequestToDTO } from '../mappers/pullRequests.js';
export async function projectsRoutes(server: FastifyInstance) {
server.post('/api/projects', async (request, reply) => {
@@ -40,24 +53,61 @@ export async function projectsRoutes(server: FastifyInstance) {
await gitService.validateRepo(body.sourceRepoPath);
- // Use provided defaultBranch or auto-detect from source repo
- const defaultBranch = body.defaultBranch || gitService.getDefaultBranch(body.sourceRepoPath);
+ // Use provided defaultBranch or auto-detect current active branch from source repo
+ const defaultBranch = body.defaultBranch || gitService.getCurrentBranch(body.sourceRepoPath);
+
+ // Auto-detect sourceRepoUrl from git remote if not provided
+ const sourceRepoUrl =
+ body.sourceRepoUrl || gitService.getRemoteUrl(body.sourceRepoPath) || undefined;
+
+ // Determine mirror repo path (shared by projects with same source path)
+ const mirrorRepoPath = gitService.getMirrorRepoPath(body.sourceRepoPath);
// Create relay repo path
const relayRepoPath = path.join(STORAGE_CONFIG.projectsDir, body.name);
- // Create relay repo by copying .git directory and resetting
- await gitService.createRelayRepo(body.sourceRepoPath, relayRepoPath, defaultBranch);
+ // Create relay repo using mirror repo architecture
+ const projectId = uuidv4();
+ await gitService.createRelayRepo(
+ body.sourceRepoPath,
+ relayRepoPath,
+ mirrorRepoPath,
+ projectId,
+ defaultBranch
+ );
+
+ // Use global default settings when project does not specify defaultAgent/agentParams
+ let defaultAgent = body.defaultAgent;
+ let agentParams = body.agentParams;
+ if (defaultAgent === undefined || agentParams === undefined) {
+ const globalSettings = await settingsRepository.getGlobalSettings();
+ if (defaultAgent === undefined)
+ defaultAgent = globalSettings.defaultAgent as 'opencode' | 'claudecode';
+ if (agentParams === undefined) {
+ try {
+ agentParams = JSON.parse(globalSettings.defaultAgentParams || '{}') as Record<
+ string,
+ unknown
+ >;
+ } catch {
+ agentParams = {};
+ }
+ }
+ }
const project = await projectsRepository.create({
- id: uuidv4(),
+ id: projectId,
name: body.name,
sourceRepoPath: body.sourceRepoPath,
- sourceRepoUrl: body.sourceRepoUrl || undefined,
+ sourceRepoUrl,
+ mirrorRepoPath,
relayRepoPath,
defaultBranch,
- defaultAgent: body.defaultAgent || 'opencode',
- agentParams: body.agentParams ? JSON.stringify(body.agentParams) : undefined,
+ defaultAgent: defaultAgent || 'opencode',
+ agentParams:
+ agentParams && Object.keys(agentParams).length > 0
+ ? JSON.stringify(agentParams)
+ : undefined,
});
return reply.status(201).send(projectToDTO(project));
@@ -74,18 +124,48 @@ export async function projectsRoutes(server: FastifyInstance) {
}
});
- server.get<{ Querystring: { page?: string; limit?: string } }>(
+ server.get<{ Querystring: { page?: string; limit?: string; includeStats?: string } }>(
'/api/projects',
async (request) => {
const page = parseInt(request.query.page || '1', 10);
const limit = parseInt(request.query.limit || '10', 10);
+ const includeStats = request.query.includeStats === 'true';
const offset = (page - 1) * limit;
const allProjects = await projectsRepository.findAll();
const total = allProjects.length;
const projects = allProjects.slice(offset, offset + limit);
- return {
+ let statistics: Record> | undefined;
+
+ if (includeStats && projects.length > 0) {
+ const projectIds = projects.map((p) => p.id);
+ const allWorkItems = await workItemsRepository.findAll();
+ const allPullRequests = await pullRequestsRepository.findAll();
+ const relevantWorkItems = allWorkItems.filter((wi) => projectIds.includes(wi.projectId));
+ const relevantPullRequests = allPullRequests.filter((pr) =>
+ projectIds.includes(pr.projectId)
+ );
+
+ const statsMap: Record> = {};
+ for (const projectId of projectIds) {
+ const projectWorkItems = relevantWorkItems.filter((wi) => wi.projectId === projectId);
+ const projectPullRequests = relevantPullRequests.filter(
+ (pr) => pr.projectId === projectId
+ );
+ statsMap[projectId] = ProjectStatsSchema.parse({
+ workItems: projectWorkItems.length,
+ openWorkItems: projectWorkItems.filter((wi) => wi.status === WORKITEM_STATUS_OPEN)
+ .length,
+ pullRequests: projectPullRequests.length,
+ openPullRequests: projectPullRequests.filter((pr) => pr.status === PR_STATUS_OPEN)
+ .length,
+ });
+ }
+ statistics = statsMap;
+ }
+
+ const payload = {
data: projects.map(projectToDTO),
pagination: {
page,
@@ -93,7 +173,9 @@ export async function projectsRoutes(server: FastifyInstance) {
total,
totalPages: Math.ceil(total / limit),
},
+ ...(statistics != null && { statistics }),
};
+ return ProjectsListResponseSchema.parse(payload);
}
);
@@ -169,18 +251,18 @@ export async function projectsRoutes(server: FastifyInstance) {
const { agent = 'opencode' } = request.query;
// Validate agent parameter
- if (agent !== 'opencode' && agent !== 'claudcode') {
+ if (agent !== 'opencode' && agent !== 'claudecode') {
return reply.status(400).send({
error: true,
- message: 'Invalid agent parameter. Must be "opencode" or "claudcode"',
+ message: 'Invalid agent parameter. Must be "opencode" or "claudecode"',
});
}
// Initialize cache for the agent if not already initialized
- await modelsCache.initialize(agent as 'opencode' | 'claudcode');
+ await modelsCache.initialize(agent as 'opencode' | 'claudecode');
// Get models from cache
- const models = modelsCache.getModels(agent as 'opencode' | 'claudcode');
+ const models = modelsCache.getModels(agent as 'opencode' | 'claudecode');
const response = ModelsResponseSchema.parse({ data: models });
return reply.status(200).send(response);
} catch (error) {
@@ -199,16 +281,16 @@ export async function projectsRoutes(server: FastifyInstance) {
const { agent = 'opencode' } = request.query;
// Validate agent parameter
- if (agent !== 'opencode' && agent !== 'claudcode') {
+ if (agent !== 'opencode' && agent !== 'claudecode') {
return reply.status(400).send({
error: true,
- message: 'Invalid agent parameter. Must be "opencode" or "claudcode"',
+ message: 'Invalid agent parameter. Must be "opencode" or "claudecode"',
});
}
// Force refresh the models cache for the specific agent
- await modelsCache.refresh(agent as 'opencode' | 'claudcode');
- const models = modelsCache.getModels(agent as 'opencode' | 'claudcode');
+ await modelsCache.refresh(agent as 'opencode' | 'claudecode');
+ const models = modelsCache.getModels(agent as 'opencode' | 'claudecode');
const response = ModelsResponseSchema.parse({ data: models });
return reply.status(200).send(response);
} catch (error) {
@@ -234,11 +316,12 @@ export async function projectsRoutes(server: FastifyInstance) {
await gitService.validateRepo(repoPath);
const branches = gitService.listBranches(repoPath);
- const defaultBranch = gitService.getDefaultBranch(repoPath);
+ const currentBranch = gitService.getCurrentBranch(repoPath);
const response = BranchesResponseSchema.parse({
data: branches,
- defaultBranch,
+ defaultBranch: currentBranch,
+ currentBranch,
});
return reply.status(200).send(response);
} catch (error) {
@@ -328,20 +411,21 @@ export async function projectsRoutes(server: FastifyInstance) {
const syncCommitSha = await gitService.syncRelayToSource(
project.relayRepoPath,
project.sourceRepoPath,
- project.name
+ project.mirrorRepoPath,
+ project.id
);
// Get the commit SHA to use for marking PRs as synced
// If a new commit was created, use that SHA; otherwise use current HEAD of relay branch
// (if no changes, it means everything is already synced)
- const relayBranch = `relay-${project.name}`;
+ // Use default branch for commit SHA (relay has been merged into default)
const commitShaToUse =
- syncCommitSha || gitService.getRefSha(project.sourceRepoPath, relayBranch);
+ syncCommitSha || gitService.getRefSha(project.sourceRepoPath, project.defaultBranch);
// Mark all merged PRs as synced
const mergedPRs = await pullRequestsRepository.findByProjectId(project.id);
const unsyncedMergedPRs = mergedPRs.filter(
- (pr) => pr.status === 'merged' && !pr.syncedCommitSha
+ (pr) => pr.status === PR_STATUS_MERGED && !pr.syncedCommitSha
);
// Update all unsynced merged PRs with the sync commit SHA
@@ -407,6 +491,657 @@ export async function projectsRoutes(server: FastifyInstance) {
}
});
+ // ============================================================================
+ // Manual File Operations with WorkItem
+ // ============================================================================
+
+ /**
+ * Get or create a manual WorkItem for the current user session
+ * All manual file operations should use the same WorkItem
+ * Ensures idempotency - returns existing WorkItem if found
+ */
+ server.post<{ Params: { id: string } }>(
+ '/api/projects/:id/work-items/manual',
+ async (request, reply) => {
+ try {
+ const body = GetOrCreateManualWorkItemDTOSchema.parse(request.body);
+ const project = await projectsRepository.findById(request.params.id);
+
+ if (!project) {
+ return reply.status(404).send({
+ error: true,
+ message: 'Project not found',
+ });
+ }
+
+ // Look for existing open manual WorkItem for this project
+ // Use a more specific identifier to avoid conflicts
+ const existingWorkItems = await workItemsRepository.findByProjectId(project.id);
+ const existingManualWorkItem = existingWorkItems.find(
+ (wi) =>
+ wi.status === WORKITEM_STATUS_OPEN &&
+ (wi.title === 'Manual edit session' ||
+ wi.title.startsWith('Manual edit session') ||
+ (body.title && wi.title === body.title))
+ );
+
+ if (existingManualWorkItem) {
+ // Ensure workspace is initialized for existing WorkItem
+ await workspaceService.ensureWorkspace(existingManualWorkItem, project);
+ // Fetch updated WorkItem from repository
+ const updatedWorkItem = await workItemsRepository.findById(existingManualWorkItem.id);
+ if (!updatedWorkItem) {
+ return reply.status(404).send({
+ error: true,
+ message: 'WorkItem not found',
+ });
+ }
+ return reply.status(200).send(workItemToDTO(updatedWorkItem));
+ }
+
+ // Create new manual WorkItem
+ const workItem = await workItemsRepository.create({
+ id: uuidv4(),
+ projectId: project.id,
+ type: 'feature-request',
+ title: body.title || 'Manual edit session',
+ body: 'Manual file editing session',
+ });
+
+ // Initialize workspace for the WorkItem
+ await workspaceService.initWorkspace(workItem.id, project);
+ // Fetch updated WorkItem from repository
+ const updatedWorkItem = await workItemsRepository.findById(workItem.id);
+ if (!updatedWorkItem) {
+ return reply.status(404).send({
+ error: true,
+ message: 'WorkItem not found',
+ });
+ }
+
+ return reply.status(201).send(workItemToDTO(updatedWorkItem));
+ } catch (error) {
+ if (error instanceof z.ZodError) {
+ return reply.status(400).send({
+ error: true,
+ message: 'Validation failed',
+ details: error.errors,
+ });
+ }
+
+ throw error;
+ }
+ }
+ );
+
+ /**
+ * Get files from WorkItem's worktree
+ */
+ server.get<{ Params: { id: string; workItemId: string } }>(
+ '/api/projects/:id/work-items/:workItemId/files',
+ async (request, reply) => {
+ try {
+ const project = await projectsRepository.findById(request.params.id);
+
+ if (!project) {
+ return reply.status(404).send({
+ error: true,
+ message: 'Project not found',
+ });
+ }
+
+ const workItem = await workItemsRepository.findById(request.params.workItemId);
+
+ if (!workItem) {
+ return reply.status(404).send({
+ error: true,
+ message: 'WorkItem not found',
+ });
+ }
+
+ // Ensure workspace is initialized
+ const updatedWorkItem = await workspaceService.ensureWorkspace(workItem, project);
+
+ if (!updatedWorkItem.worktreePath) {
+ return reply.status(400).send({
+ error: true,
+ message: 'WorkItem workspace is not initialized',
+ });
+ }
+
+ const files = await gitService.listFiles(updatedWorkItem.worktreePath);
+
+ const response = FilesResponseSchema.parse({ data: files });
+ return reply.status(200).send(response);
+ } catch (error) {
+ return reply.status(500).send({
+ error: true,
+ message: 'Failed to list files',
+ details: error instanceof Error ? error.message : String(error),
+ });
+ }
+ }
+ );
+
+ /**
+ * Get file content from WorkItem's worktree
+ */
+ server.get<{ Params: { id: string; workItemId: string }; Querystring: { path: string } }>(
+ '/api/projects/:id/work-items/:workItemId/files/content',
+ async (request, reply) => {
+ try {
+ const { path: filePath } = request.query;
+ const project = await projectsRepository.findById(request.params.id);
+
+ if (!project) {
+ return reply.status(404).send({
+ error: true,
+ message: 'Project not found',
+ });
+ }
+
+ const workItem = await workItemsRepository.findById(request.params.workItemId);
+
+ if (!workItem) {
+ return reply.status(404).send({
+ error: true,
+ message: 'WorkItem not found',
+ });
+ }
+
+ if (!filePath) {
+ return reply.status(400).send({
+ error: true,
+ message: 'File path is required',
+ });
+ }
+
+ // Ensure workspace is initialized
+ const updatedWorkItem = await workspaceService.ensureWorkspace(workItem, project);
+
+ if (!updatedWorkItem.worktreePath) {
+ return reply.status(400).send({
+ error: true,
+ message: 'WorkItem workspace is not initialized',
+ });
+ }
+
+ // Check if file is binary or empty
+ const fullPath = path.join(updatedWorkItem.worktreePath, filePath);
+ const stats = await fs.stat(fullPath);
+ const isBinary = await (async () => {
+ try {
+ const content = await fs.readFile(fullPath, { encoding: 'utf-8' });
+ // Check for null bytes or other control characters (binary-ish)
+ for (let i = 0; i < content.length; i++) {
+ const code = content.charCodeAt(i);
+ if (code === 0) return true; // NUL
+ if (code < 9) return true; // C0 controls below tab
+ if (code > 13 && code < 32) return true; // C0 controls excluding \t,\n,\r
+ }
+ return false;
+ } catch {
+ return true;
+ }
+ })();
+
+ if (isBinary) {
+ return reply.status(200).send({
+ data: {
+ path: filePath,
+ content: null,
+ isBinary: true,
+ size: stats.size,
+ },
+ });
+ }
+
+ const content = await gitService.getFileContent(updatedWorkItem.worktreePath, filePath);
+
+ const response = FileContentResponseSchema.parse({
+ data: {
+ path: filePath,
+ content,
+ isBinary: false,
+ size: stats.size,
+ },
+ });
+ return reply.status(200).send(response);
+ } catch (error) {
+ return reply.status(500).send({
+ error: true,
+ message: 'Failed to read file',
+ details: error instanceof Error ? error.message : String(error),
+ });
+ }
+ }
+ );
+
+ /**
+ * Create a new file in the WorkItem's worktree
+ * Auto-commits with a sensible commit message
+ */
+ server.post<{ Params: { id: string; workItemId: string } }>(
+ '/api/projects/:id/work-items/:workItemId/files',
+ async (request, reply) => {
+ try {
+ const body = CreateFileDTOSchema.parse(request.body);
+ const project = await projectsRepository.findById(request.params.id);
+
+ if (!project) {
+ return reply.status(404).send({
+ error: true,
+ message: 'Project not found',
+ });
+ }
+
+ const workItem = await workItemsRepository.findById(request.params.workItemId);
+
+ if (!workItem) {
+ return reply.status(404).send({
+ error: true,
+ message: 'WorkItem not found',
+ });
+ }
+
+ // Ensure workspace is initialized
+ const updatedWorkItem = await workspaceService.ensureWorkspace(workItem, project);
+
+ if (!updatedWorkItem.worktreePath) {
+ return reply.status(400).send({
+ error: true,
+ message: 'WorkItem workspace is not initialized',
+ });
+ }
+
+ // Validate path (prevent directory traversal)
+ if (body.path.includes('..') || path.isAbsolute(body.path)) {
+ return reply.status(400).send({
+ error: true,
+ message: 'Invalid file path',
+ });
+ }
+
+ // Create the file in the worktree
+ const filePath = path.join(updatedWorkItem.worktreePath, body.path);
+ const dirPath = path.dirname(filePath);
+
+ // Create directory if it doesn't exist
+ await fs.mkdir(dirPath, { recursive: true });
+ await fs.writeFile(filePath, body.content, 'utf-8');
+
+ // Auto-commit with sensible message
+ const commitMessage = `Add ${body.path}`;
+ const commitSha = gitService.commitChanges(updatedWorkItem.worktreePath, commitMessage);
+
+ // Update WorkItem with new head SHA
+ await workItemsRepository.update(updatedWorkItem.id, {
+ headSha: commitSha,
+ });
+
+ return reply.status(201).send({
+ success: true,
+ message: 'File created successfully',
+ path: body.path,
+ commitSha,
+ });
+ } catch (error) {
+ if (error instanceof z.ZodError) {
+ return reply.status(400).send({
+ error: true,
+ message: 'Validation failed',
+ details: error.errors,
+ });
+ }
+
+ return reply.status(500).send({
+ error: true,
+ message: 'Failed to create file',
+ details: error instanceof Error ? error.message : String(error),
+ });
+ }
+ }
+ );
+
+ /**
+ * Update an existing file in the WorkItem's worktree
+ * Auto-commits with a sensible commit message
+ */
+ server.put<{ Params: { id: string; workItemId: string } }>(
+ '/api/projects/:id/work-items/:workItemId/files',
+ async (request, reply) => {
+ try {
+ const body = UpdateFileDTOSchema.parse(request.body);
+ const project = await projectsRepository.findById(request.params.id);
+
+ if (!project) {
+ return reply.status(404).send({
+ error: true,
+ message: 'Project not found',
+ });
+ }
+
+ const workItem = await workItemsRepository.findById(request.params.workItemId);
+
+ if (!workItem) {
+ return reply.status(404).send({
+ error: true,
+ message: 'WorkItem not found',
+ });
+ }
+
+ // Ensure workspace is initialized
+ const updatedWorkItem = await workspaceService.ensureWorkspace(workItem, project);
+
+ if (!updatedWorkItem.worktreePath) {
+ return reply.status(400).send({
+ error: true,
+ message: 'WorkItem workspace is not initialized',
+ });
+ }
+
+ // Validate path
+ if (body.path.includes('..') || path.isAbsolute(body.path)) {
+ return reply.status(400).send({
+ error: true,
+ message: 'Invalid file path',
+ });
+ }
+
+ // Check if file exists
+ const filePath = path.join(updatedWorkItem.worktreePath, body.path);
+ try {
+ await fs.access(filePath);
+ } catch {
+ return reply.status(404).send({
+ error: true,
+ message: 'File not found',
+ });
+ }
+
+ // Update the file in the worktree
+ await fs.writeFile(filePath, body.content, 'utf-8');
+
+ // Auto-commit with sensible message
+ const commitMessage = `Update ${body.path}`;
+ const commitSha = gitService.commitChanges(updatedWorkItem.worktreePath, commitMessage);
+
+ // Update WorkItem with new head SHA
+ await workItemsRepository.update(updatedWorkItem.id, {
+ headSha: commitSha,
+ });
+
+ return reply.status(200).send({
+ success: true,
+ message: 'File updated successfully',
+ path: body.path,
+ commitSha,
+ });
+ } catch (error) {
+ if (error instanceof z.ZodError) {
+ return reply.status(400).send({
+ error: true,
+ message: 'Validation failed',
+ details: error.errors,
+ });
+ }
+
+ return reply.status(500).send({
+ error: true,
+ message: 'Failed to update file',
+ details: error instanceof Error ? error.message : String(error),
+ });
+ }
+ }
+ );
+
+ /**
+ * Delete a file in the WorkItem's worktree
+ * Auto-commits with a sensible commit message
+ */
+ server.delete<{ Params: { id: string; workItemId: string }; Querystring: { path: string } }>(
+ '/api/projects/:id/work-items/:workItemId/files',
+ async (request, reply) => {
+ try {
+ const { path: filePath } = request.query;
+ const project = await projectsRepository.findById(request.params.id);
+
+ if (!project) {
+ return reply.status(404).send({
+ error: true,
+ message: 'Project not found',
+ });
+ }
+
+ const workItem = await workItemsRepository.findById(request.params.workItemId);
+
+ if (!workItem) {
+ return reply.status(404).send({
+ error: true,
+ message: 'WorkItem not found',
+ });
+ }
+
+ if (!filePath) {
+ return reply.status(400).send({
+ error: true,
+ message: 'File path is required',
+ });
+ }
+
+ // Validate path
+ if (filePath.includes('..') || path.isAbsolute(filePath)) {
+ return reply.status(400).send({
+ error: true,
+ message: 'Invalid file path',
+ });
+ }
+
+ // Ensure workspace is initialized
+ const updatedWorkItem = await workspaceService.ensureWorkspace(workItem, project);
+
+ if (!updatedWorkItem.worktreePath) {
+ return reply.status(400).send({
+ error: true,
+ message: 'WorkItem workspace is not initialized',
+ });
+ }
+
+ // Check if file exists
+ const fullPath = path.join(updatedWorkItem.worktreePath, filePath);
+ try {
+ await fs.access(fullPath);
+ } catch {
+ return reply.status(404).send({
+ error: true,
+ message: 'File not found',
+ });
+ }
+
+ // Delete the file in the worktree
+ await fs.unlink(fullPath);
+
+ // Auto-commit with sensible message
+ const commitMessage = `Delete ${filePath}`;
+ const commitSha = gitService.commitChanges(updatedWorkItem.worktreePath, commitMessage);
+
+ // Update WorkItem with new head SHA
+ await workItemsRepository.update(updatedWorkItem.id, {
+ headSha: commitSha,
+ });
+
+ return reply.status(200).send({
+ success: true,
+ message: 'File deleted successfully',
+ path: filePath,
+ commitSha,
+ });
+ } catch (error) {
+ return reply.status(500).send({
+ error: true,
+ message: 'Failed to delete file',
+ details: error instanceof Error ? error.message : String(error),
+ });
+ }
+ }
+ );
+
+ /**
+ * Commit changes in the WorkItem's worktree
+ */
+ server.post<{ Params: { id: string; workItemId: string } }>(
+ '/api/projects/:id/work-items/:workItemId/commit',
+ async (request, reply) => {
+ try {
+ const body = CommitChangesDTOSchema.parse(request.body);
+ const project = await projectsRepository.findById(request.params.id);
+
+ if (!project) {
+ return reply.status(404).send({
+ error: true,
+ message: 'Project not found',
+ });
+ }
+
+ const workItem = await workItemsRepository.findById(request.params.workItemId);
+
+ if (!workItem) {
+ return reply.status(404).send({
+ error: true,
+ message: 'WorkItem not found',
+ });
+ }
+
+ // Ensure workspace is initialized
+ const updatedWorkItem = await workspaceService.ensureWorkspace(workItem, project);
+
+ if (!updatedWorkItem.worktreePath) {
+ return reply.status(400).send({
+ error: true,
+ message: 'WorkItem workspace is not initialized',
+ });
+ }
+
+ // Check if there are any changes to commit
+ if (!gitService.hasAnyChanges(updatedWorkItem.worktreePath)) {
+ return reply.status(400).send({
+ error: true,
+ message: 'No changes to commit',
+ });
+ }
+
+ // Commit the changes
+ const commitSha = gitService.commitChanges(updatedWorkItem.worktreePath, body.message);
+
+ // Update WorkItem with new head SHA
+ const finalWorkItem = await workItemsRepository.update(workItem.id, {
+ headSha: commitSha,
+ });
+
+ return reply.status(200).send({
+ success: true,
+ message: 'Changes committed successfully',
+ commitSha,
+ workItem: finalWorkItem ? workItemToDTO(finalWorkItem) : workItemToDTO(workItem),
+ });
+ } catch (error) {
+ if (error instanceof z.ZodError) {
+ return reply.status(400).send({
+ error: true,
+ message: 'Validation failed',
+ details: error.errors,
+ });
+ }
+
+ return reply.status(500).send({
+ error: true,
+ message: 'Failed to commit changes',
+ details: error instanceof Error ? error.message : String(error),
+ });
+ }
+ }
+ );
+
+ /**
+ * Create a Pull Request from the WorkItem
+ * Ensures idempotency and commits any uncommitted changes before creating PR
+ */
+ server.post<{ Params: { id: string; workItemId: string } }>(
+ '/api/projects/:id/work-items/:workItemId/create-pr',
+ async (request, reply) => {
+ try {
+ const project = await projectsRepository.findById(request.params.id);
+
+ if (!project) {
+ return reply.status(404).send({
+ error: true,
+ message: 'Project not found',
+ });
+ }
+
+ const workItem = await workItemsRepository.findById(request.params.workItemId);
+
+ if (!workItem) {
+ return reply.status(404).send({
+ error: true,
+ message: 'WorkItem not found',
+ });
+ }
+
+ // Ensure workspace is initialized
+ const updatedWorkItem = await workspaceService.ensureWorkspace(workItem, project);
+
+ if (!updatedWorkItem.worktreePath) {
+ return reply.status(400).send({
+ error: true,
+ message: 'WorkItem workspace is not initialized',
+ });
+ }
+
+ // Check if PR already exists (idempotency)
+ const existingPR = await pullRequestsRepository.findByWorkItemId(workItem.id);
+ if (existingPR) {
+ return reply.status(200).send(pullRequestToDTO(existingPR));
+ }
+
+ // Commit any uncommitted changes before creating PR
+ if (gitService.hasAnyChanges(updatedWorkItem.worktreePath)) {
+ const commitMessage = 'Finish manual editing session';
+ const commitSha = gitService.commitChanges(updatedWorkItem.worktreePath, commitMessage);
+ await workItemsRepository.update(workItem.id, {
+ headSha: commitSha,
+ });
+ // Refresh workItem to get updated headSha
+ const refreshedWorkItem = await workItemsRepository.findById(workItem.id);
+ if (refreshedWorkItem) {
+ // Create PR using PRService
+ const pr = await prService.openPR(
+ refreshedWorkItem.id,
+ project.id,
+ refreshedWorkItem.title,
+ refreshedWorkItem.body,
+ refreshedWorkItem.headBranch || project.defaultBranch,
+ project.defaultBranch
+ );
+ if (!pr) {
+ return reply.status(400).send({
+ error: true,
+ message: 'No changes detected, cannot create PR',
+ });
+ }
+ return reply.status(201).send(pullRequestToDTO(pr));
+ }
+ }
+ } catch (error) {
+ return reply.status(400).send({
+ error: true,
+ message: 'Failed to create PR',
+ details: error instanceof Error ? error.message : String(error),
+ });
+ }
+ }
+ );
+
server.delete<{ Params: { id: string } }>('/api/projects/:id', async (request, reply) => {
try {
const project = await projectsRepository.findById(request.params.id);
diff --git a/backend/src/routes/pullRequests.ts b/backend/src/routes/pullRequests.ts
index d5f8af2..78e4883 100644
--- a/backend/src/routes/pullRequests.ts
+++ b/backend/src/routes/pullRequests.ts
@@ -4,44 +4,49 @@ import { pullRequestsRepository } from '../repositories/PullRequestsRepository.j
import { workItemsRepository } from '../repositories/WorkItemsRepository.js';
import { projectsRepository } from '../repositories/ProjectsRepository.js';
import { prService } from '../services/PRService.js';
-import { gitService } from '../services/GitService.js';
+import { gitService } from '../services/git/GitService.js';
import { toDTO as pullRequestToDTO } from '../mappers/pullRequests.js';
export async function pullRequestsRoutes(server: FastifyInstance) {
- // GET /api/pull-requests - List PRs (with optional project filter and pagination)
- server.get<{ Querystring: { projectId?: string; page?: string; limit?: string } }>(
- '/api/pull-requests',
- async (request) => {
- const { projectId, page: pageStr, limit: limitStr } = request.query;
- const page = parseInt(pageStr || '1', 10);
- const limit = parseInt(limitStr || '10', 10);
-
- let prs: Awaited>;
-
- if (projectId) {
- prs = await pullRequestsRepository.findByProjectId(projectId);
- } else {
- // If no projectId, return all PRs (you might want to add a findAll method to repository)
- // For now, we'll return an empty array if no projectId is provided
- prs = [];
- }
+ // GET /api/pull-requests - List PRs (with optional project, status filter and pagination)
+ server.get<{
+ Querystring: { projectId?: string; status?: string; page?: string; limit?: string };
+ }>('/api/pull-requests', async (request) => {
+ const { projectId, status: statusFilter, page: pageStr, limit: limitStr } = request.query;
+ const page = parseInt(pageStr || '1', 10);
+ const limit = parseInt(limitStr || '10', 10);
+
+ let prs: Awaited>;
+
+ if (projectId) {
+ prs = await pullRequestsRepository.findByProjectId(projectId);
+ } else {
+ prs = await pullRequestsRepository.findAll();
+ }
- // Apply pagination
- const startIndex = (page - 1) * limit;
- const endIndex = startIndex + limit;
- const paginatedPRs = prs.slice(startIndex, endIndex);
-
- return {
- data: paginatedPRs.map(pullRequestToDTO),
- pagination: {
- page,
- limit,
- total: prs.length,
- totalPages: Math.ceil(prs.length / limit),
- },
- };
+ // Filter by status if provided (before pagination so each page has full page of matching items)
+ if (statusFilter && statusFilter !== 'all') {
+ const validStatuses = ['open', 'merged', 'closed'] as const;
+ if (validStatuses.includes(statusFilter as (typeof validStatuses)[number])) {
+ prs = prs.filter((pr) => pr.status === statusFilter);
+ }
}
- );
+
+ const total = prs.length;
+ const startIndex = (page - 1) * limit;
+ const endIndex = startIndex + limit;
+ const paginatedPRs = prs.slice(startIndex, endIndex);
+
+ return {
+ data: paginatedPRs.map(pullRequestToDTO),
+ pagination: {
+ page,
+ limit,
+ total,
+ totalPages: Math.ceil(total / limit),
+ },
+ };
+ });
// GET /api/pull-requests/:id - Get PR details
server.get<{ Params: { id: string } }>('/api/pull-requests/:id', async (request, reply) => {
diff --git a/backend/src/routes/reviews.ts b/backend/src/routes/reviews.ts
index 5dd644b..40c3ceb 100644
--- a/backend/src/routes/reviews.ts
+++ b/backend/src/routes/reviews.ts
@@ -13,7 +13,7 @@ import { reviewCommentsRepository } from '../repositories/ReviewCommentsReposito
import { agentRunsRepository } from '../repositories/AgentRunsRepository.js';
import { pullRequestsRepository } from '../repositories/PullRequestsRepository.js';
import { workItemsRepository } from '../repositories/WorkItemsRepository.js';
-import { agentService } from '../services/AgentService.js';
+import { agentService } from '../services/agent/AgentService.js';
import { reviewThreadToDTO, reviewCommentToDTO } from '../mappers/reviews.js';
import { toDTO as agentRunToDTO } from '../mappers/agentRuns.js';
diff --git a/backend/src/routes/routes.test.ts b/backend/src/routes/routes.test.ts
index c3d48a8..7ac79f8 100644
--- a/backend/src/routes/routes.test.ts
+++ b/backend/src/routes/routes.test.ts
@@ -16,15 +16,8 @@ import { projectsRepository } from '../repositories/ProjectsRepository.js';
import { workItemsRepository } from '../repositories/WorkItemsRepository.js';
import { pullRequestsRepository } from '../repositories/PullRequestsRepository.js';
import { agentRunsRepository } from '../repositories/AgentRunsRepository.js';
-import { targetReposRepository } from '../repositories/TargetReposRepository.js';
import { v4 as uuidv4 } from 'uuid';
-import {
- ProjectSchema,
- WorkItemSchema,
- PullRequestSchema,
- AgentRunSchema,
- TargetRepoSchema,
-} from 'git-vibe-shared';
+import { ProjectSchema, WorkItemSchema, PullRequestSchema, AgentRunSchema } from 'git-vibe-shared';
// Helper to create a test server
async function createTestServer() {
@@ -33,7 +26,6 @@ async function createTestServer() {
await server.register((await import('./workitems.js')).workitemsRoutes);
await server.register((await import('./pullRequests.js')).pullRequestsRoutes);
await server.register((await import('./agentRuns.js')).agentRunsRoutes);
- await server.register((await import('./targetRepos.js')).targetReposRoutes);
return server;
}
@@ -79,6 +71,7 @@ describe('Backend Routes - Response Schema Validation', () => {
id: uuidv4(),
name: `test-route-project-${Date.now()}`,
sourceRepoPath: '/tmp/test/source',
+ mirrorRepoPath: '/tmp/test/mirror.git',
relayRepoPath: '/tmp/test/relay',
defaultBranch: 'main',
});
@@ -123,6 +116,7 @@ describe('Backend Routes - Response Schema Validation', () => {
id: uuidv4(),
name: `test-workitem-project-${Date.now()}`,
sourceRepoPath: '/tmp/test/source',
+ mirrorRepoPath: '/tmp/test/mirror.git',
relayRepoPath: '/tmp/test/relay',
defaultBranch: 'main',
});
@@ -207,6 +201,7 @@ describe('Backend Routes - Response Schema Validation', () => {
id: uuidv4(),
name: `test-pr-project-${Date.now()}`,
sourceRepoPath: '/tmp/test/source',
+ mirrorRepoPath: '/tmp/test/mirror.git',
relayRepoPath: '/tmp/test/relay',
defaultBranch: 'main',
});
@@ -301,6 +296,7 @@ describe('Backend Routes - Response Schema Validation', () => {
id: uuidv4(),
name: `test-agentrun-project-${Date.now()}`,
sourceRepoPath: '/tmp/test/source',
+ mirrorRepoPath: '/tmp/test/mirror.git',
relayRepoPath: '/tmp/test/relay',
defaultBranch: 'main',
});
@@ -407,50 +403,4 @@ describe('Backend Routes - Response Schema Validation', () => {
expect(result.success).toBe(true);
});
});
-
- describe('TargetRepos routes', () => {
- it('GET /api/target-repos returns array matching schema', async () => {
- const response = await server.inject({
- method: 'GET',
- url: '/api/target-repos',
- });
-
- expect(response.statusCode).toBe(200);
- const body = JSON.parse(response.payload);
-
- // Validate each target repo in array
- if (Array.isArray(body) && body.length > 0) {
- body.forEach((repo: any) => {
- const result = TargetRepoSchema.safeParse(repo);
- expect(result.success).toBe(true);
- });
- }
- });
-
- it('GET /api/target-repos/:id returns single target repo matching schema', async () => {
- // Create a test target repo
- const targetRepo = await targetReposRepository.create({
- id: uuidv4(),
- name: `test-target-repo-${Date.now()}`,
- repoPath: '/tmp/test/target',
- defaultBranch: 'main',
- });
-
- const response = await server.inject({
- method: 'GET',
- url: `/api/target-repos/${targetRepo.id}`,
- });
-
- expect(response.statusCode).toBe(200);
- const body = JSON.parse(response.payload);
-
- // Validate against shared schema
- const result = TargetRepoSchema.safeParse(body);
- expect(result.success).toBe(true);
-
- // Verify date fields are in canonical ISO format
- expect(body.createdAt).toMatch(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/);
- expect(body.updatedAt).toMatch(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/);
- });
- });
});
diff --git a/backend/src/routes/search.ts b/backend/src/routes/search.ts
new file mode 100644
index 0000000..347fd3b
--- /dev/null
+++ b/backend/src/routes/search.ts
@@ -0,0 +1,85 @@
+import type { FastifyInstance } from 'fastify';
+import { inArray, or, sql } from 'drizzle-orm';
+import { SearchResponseSchema } from 'git-vibe-shared';
+import { toDTO as projectToDTO } from '../mappers/projects.js';
+import { toDTO as workItemToDTO } from '../mappers/workItems.js';
+import { toDTO as pullRequestToDTO } from '../mappers/pullRequests.js';
+import { projects, workItems, pullRequests } from '../models/schema.js';
+import { getDb } from '../db/client.js';
+import type { Project, WorkItem } from '../types/models.js';
+
+export async function searchRoutes(server: FastifyInstance) {
+ // GET /api/search - Search across projects, work items, and pull requests
+ server.get<{ Querystring: { q?: string; limit?: string } }>('/api/search', async (request) => {
+ const query = request.query.q?.trim() || '';
+ const limit = parseInt(request.query.limit || '20', 10);
+
+ if (!query) {
+ return SearchResponseSchema.parse({
+ projects: [],
+ workItems: [],
+ pullRequests: [],
+ projectNames: {},
+ });
+ }
+
+ const db = await getDb();
+ const searchPattern = `%${query}%`;
+
+ // Search projects by name (case-insensitive)
+ const matchingProjects = await db
+ .select()
+ .from(projects)
+ .where(sql`LOWER(${projects.name}) LIKE LOWER(${searchPattern})`)
+ .limit(limit)
+ .execute();
+
+ // Search work items by title or body (case-insensitive)
+ const matchingWorkItems = await db
+ .select()
+ .from(workItems)
+ .where(
+ or(
+ sql`LOWER(${workItems.title}) LIKE LOWER(${searchPattern})`,
+ sql`LOWER(${workItems.body}) LIKE LOWER(${searchPattern})`
+ )!
+ )
+ .limit(limit)
+ .execute();
+
+ // Search pull requests by title or description (case-insensitive)
+ const matchingPullRequests = await db
+ .select()
+ .from(pullRequests)
+ .where(
+ or(
+ sql`LOWER(${pullRequests.title}) LIKE LOWER(${searchPattern})`,
+ sql`LOWER(${pullRequests.description}) LIKE LOWER(${searchPattern})`
+ )!
+ )
+ .limit(limit)
+ .execute();
+
+ // Build projectNames map for work items and PRs (projectId -> name)
+ const projectIds = new Set();
+ for (const wi of matchingWorkItems) projectIds.add(wi.projectId);
+ for (const pr of matchingPullRequests) projectIds.add(pr.projectId);
+ const projectNames: Record = {};
+ if (projectIds.size > 0) {
+ const projectRows = await db
+ .select({ id: projects.id, name: projects.name })
+ .from(projects)
+ .where(inArray(projects.id, [...projectIds]))
+ .execute();
+ for (const p of projectRows) projectNames[p.id] = p.name;
+ }
+
+ const payload = {
+ projects: matchingProjects.map((p) => projectToDTO(p as Project)),
+ workItems: matchingWorkItems.map((wi) => workItemToDTO(wi as WorkItem)),
+ pullRequests: matchingPullRequests.map(pullRequestToDTO),
+ projectNames,
+ };
+ return SearchResponseSchema.parse(payload);
+ });
+}
diff --git a/backend/src/routes/settings.ts b/backend/src/routes/settings.ts
new file mode 100644
index 0000000..5d2d43b
--- /dev/null
+++ b/backend/src/routes/settings.ts
@@ -0,0 +1,90 @@
+import type { FastifyInstance } from 'fastify';
+import { z } from 'zod';
+import { settingsRepository } from '../repositories/SettingsRepository.js';
+import { AgentKeySchema } from 'git-vibe-shared';
+
+const GlobalSettingsResponseSchema = z.object({
+ defaultAgent: AgentKeySchema,
+ agentParams: z.record(z.unknown()),
+});
+
+const UpdateGlobalSettingsSchema = z.object({
+ defaultAgent: AgentKeySchema.optional(),
+ agentParams: z.record(z.unknown()).optional(),
+});
+
+export async function settingsRoutes(server: FastifyInstance) {
+ server.get('/api/settings', async (_request, reply) => {
+ try {
+ const settings = await settingsRepository.getGlobalSettings();
+ let agentParams: Record = {};
+ try {
+ agentParams = JSON.parse(settings.defaultAgentParams || '{}') as Record;
+ } catch {
+ agentParams = {};
+ }
+ const response = GlobalSettingsResponseSchema.parse({
+ defaultAgent: settings.defaultAgent as 'opencode' | 'claudecode',
+ agentParams,
+ });
+ return reply.status(200).send(response);
+ } catch (error) {
+ return reply.status(500).send({
+ error: true,
+ message: 'Failed to get settings',
+ details: error instanceof Error ? error.message : String(error),
+ });
+ }
+ });
+
+ server.patch('/api/settings', async (request, reply) => {
+ try {
+ const body = UpdateGlobalSettingsSchema.parse(request.body);
+ const updates: { defaultAgent?: string; defaultAgentParams?: string } = {};
+ if (body.defaultAgent !== undefined) {
+ updates.defaultAgent = body.defaultAgent;
+ }
+ if (body.agentParams !== undefined) {
+ updates.defaultAgentParams = JSON.stringify(body.agentParams);
+ }
+ if (Object.keys(updates).length === 0) {
+ const current = await settingsRepository.getGlobalSettings();
+ let agentParams: Record = {};
+ try {
+ agentParams = JSON.parse(current.defaultAgentParams || '{}') as Record;
+ } catch {
+ agentParams = {};
+ }
+ return reply.status(200).send({
+ defaultAgent: current.defaultAgent,
+ agentParams,
+ });
+ }
+ const updated = await settingsRepository.updateGlobalSettings(updates);
+ let agentParams: Record = {};
+ try {
+ agentParams = JSON.parse(updated.defaultAgentParams || '{}') as Record;
+ } catch {
+ agentParams = {};
+ }
+ const response = GlobalSettingsResponseSchema.parse({
+ defaultAgent: updated.defaultAgent as 'opencode' | 'claudecode',
+ agentParams,
+ });
+ return reply.status(200).send(response);
+ } catch (error) {
+ if (error instanceof z.ZodError) {
+ return reply.status(400).send({
+ error: true,
+ message: 'Validation failed',
+ details: error.errors,
+ });
+ }
+ return reply.status(500).send({
+ error: true,
+ message: 'Failed to update settings',
+ details: error instanceof Error ? error.message : String(error),
+ });
+ }
+ });
+}
diff --git a/backend/src/routes/targetRepos.ts b/backend/src/routes/targetRepos.ts
deleted file mode 100644
index 8fc0404..0000000
--- a/backend/src/routes/targetRepos.ts
+++ /dev/null
@@ -1,57 +0,0 @@
-import type { FastifyInstance } from 'fastify';
-import { z } from 'zod';
-import { v4 as uuidv4 } from 'uuid';
-import { CreateTargetRepoDTOSchema } from 'git-vibe-shared';
-import { targetReposRepository } from '../repositories/TargetReposRepository.js';
-import { gitService } from '../services/GitService.js';
-import { toDTO as targetRepoToDTO } from '../mappers/targetRepos.js';
-
-export async function targetReposRoutes(server: FastifyInstance) {
- server.post('/api/target-repos', async (request, reply) => {
- try {
- const body = CreateTargetRepoDTOSchema.parse(request.body);
-
- await gitService.validateRepo(body.repoPath);
-
- const defaultBranch = gitService.getDefaultBranch(body.repoPath);
-
- const targetRepo = await targetReposRepository.create({
- id: uuidv4(),
- name: body.name,
- repoPath: body.repoPath,
- defaultBranch,
- });
-
- return reply.status(201).send(targetRepoToDTO(targetRepo));
- } catch (error) {
- if (error instanceof z.ZodError) {
- return reply.status(400).send({
- error: true,
- message: 'Validation failed',
- details: error.errors,
- });
- }
-
- throw error;
- }
- });
-
- server.get('/api/target-repos', async () => {
- const targetRepos = await targetReposRepository.findAll();
- return targetRepos.map(targetRepoToDTO);
- });
-
- server.get<{ Params: { id: string } }>('/api/target-repos/:id', async (request) => {
- const targetRepo = await targetReposRepository.findById(request.params.id);
-
- if (!targetRepo) {
- return {
- error: true,
- message: 'Target repo not found',
- statusCode: 404,
- };
- }
-
- return targetRepoToDTO(targetRepo);
- });
-}
diff --git a/backend/src/routes/workflows.ts b/backend/src/routes/workflows.ts
new file mode 100644
index 0000000..540c282
--- /dev/null
+++ b/backend/src/routes/workflows.ts
@@ -0,0 +1,605 @@
+import type { FastifyInstance } from 'fastify';
+import {
+ WorkflowSchema,
+ CreateWorkflowDTOSchema,
+ UpdateWorkflowDTOSchema,
+ ExecuteWorkflowDTOSchema,
+ type Workflow,
+} from 'git-vibe-shared';
+import { workflowValidationService } from '../services/workflow/WorkflowValidationService.js';
+import { workflowExecutionService } from '../services/workflow/WorkflowExecutionService.js';
+import type { WorkflowRecord, NodeRunRecord } from '../repositories/WorkflowsRepository.js';
+import {
+ createDefaultWorkflow,
+ getDefaultWorkflowVersion,
+ getWorkflowVersion,
+} from '../services/workflow/defaultWorkflow.js';
+import { workflowsRepository } from '../repositories/WorkflowsRepository.js';
+import { projectsRepository } from '../repositories/ProjectsRepository.js';
+
+export const workflowRoutes = async (fastify: FastifyInstance): Promise => {
+ fastify.get<{ Querystring: { projectId?: string; page?: string; limit?: string } }>(
+ '/api/workflows',
+ async (request, reply) => {
+ try {
+ const { projectId, page: pageStr, limit: limitStr } = request.query;
+ const page = parseInt(pageStr || '1', 10);
+ const limit = parseInt(limitStr || '50', 10);
+
+ if (!projectId) {
+ return reply.code(400).send({
+ error: true,
+ message: 'projectId query parameter is required',
+ });
+ }
+
+ const workflowsRepo = workflowsRepository;
+
+ // Verify project exists
+ const project = await projectsRepository.findById(projectId);
+ if (!project) {
+ return reply.code(404).send({
+ error: true,
+ message: `Project not found: ${projectId}`,
+ });
+ }
+
+ let allWorkflows = await workflowsRepo.findByProjectId(projectId);
+
+ // Ensure default workflow exists and has the correct version
+ const expectedDefaultWorkflow = createDefaultWorkflow(projectId);
+ const CURRENT_VERSION = getDefaultWorkflowVersion();
+ const expectedWorkflowId = expectedDefaultWorkflow.workflow.id;
+
+ let defaultWorkflowRecord = await workflowsRepo.findDefault(projectId);
+
+ // Check if default workflow exists and has correct version
+ if (!defaultWorkflowRecord) {
+ // No default workflow exists for this project, create it
+ defaultWorkflowRecord = await workflowsRepo.create({
+ id: expectedWorkflowId,
+ projectId,
+ name: expectedDefaultWorkflow.workflow.name,
+ definition: expectedDefaultWorkflow,
+ isDefault: true,
+ version: CURRENT_VERSION,
+ });
+ } else {
+ // Default workflow exists, check version
+ const dbVersion =
+ defaultWorkflowRecord.version ||
+ getWorkflowVersion(defaultWorkflowRecord.definition) ||
+ 1;
+
+ if (dbVersion < CURRENT_VERSION) {
+ // Version is outdated, update it
+ const oldId = defaultWorkflowRecord.id;
+
+ // If ID changed (due to version change), preserve old version and create new default
+ if (oldId !== expectedWorkflowId) {
+ // Mark old workflow as non-default (preserve for traceability)
+ await workflowsRepo.update(oldId, {
+ isDefault: false,
+ });
+ // Create new default workflow with new ID
+ defaultWorkflowRecord = await workflowsRepo.create({
+ id: expectedWorkflowId,
+ projectId,
+ name: expectedDefaultWorkflow.workflow.name,
+ definition: expectedDefaultWorkflow,
+ isDefault: true,
+ version: CURRENT_VERSION,
+ });
+ } else {
+ // Same ID, just update the definition (preserve old version in history if needed)
+ defaultWorkflowRecord = await workflowsRepo.update(oldId, {
+ name: expectedDefaultWorkflow.workflow.name,
+ definition: expectedDefaultWorkflow,
+ version: CURRENT_VERSION,
+ isDefault: true,
+ });
+
+ // If update failed, create new workflow with new ID and preserve old one
+ if (!defaultWorkflowRecord) {
+ // Mark old as non-default
+ await workflowsRepo.update(oldId, {
+ isDefault: false,
+ });
+ // Create new default
+ defaultWorkflowRecord = await workflowsRepo.create({
+ id: expectedWorkflowId,
+ projectId,
+ name: expectedDefaultWorkflow.workflow.name,
+ definition: expectedDefaultWorkflow,
+ isDefault: true,
+ version: CURRENT_VERSION,
+ });
+ }
+ }
+ }
+ }
+
+ // Refresh workflows list after potential updates
+ allWorkflows = await workflowsRepo.findByProjectId(projectId);
+
+ const workflowsData = allWorkflows.map((w: WorkflowRecord) => ({
+ id: w.id,
+ name: w.name,
+ description:
+ (typeof w.definition === 'string'
+ ? (JSON.parse(w.definition) as Workflow).workflow.description
+ : (w.definition as Workflow).workflow.description) ?? '',
+ definition: typeof w.definition === 'string' ? JSON.parse(w.definition) : w.definition,
+ isDefault: w.isDefault,
+ createdAt: w.createdAt.toISOString(),
+ updatedAt: w.updatedAt.toISOString(),
+ }));
+
+ // Apply pagination
+ const startIndex = (page - 1) * limit;
+ const endIndex = startIndex + limit;
+ const paginatedWorkflows = workflowsData.slice(startIndex, endIndex);
+
+ return reply.send({
+ data: paginatedWorkflows,
+ pagination: {
+ page,
+ limit,
+ total: workflowsData.length,
+ totalPages: Math.ceil(workflowsData.length / limit),
+ },
+ });
+ } catch (error) {
+ return reply.code(500).send({
+ error: true,
+ message: `Failed to list workflows: ${error instanceof Error ? error.message : 'Unknown error'}`,
+ });
+ }
+ }
+ );
+
+ fastify.get<{ Params: { id: string } }>('/api/workflows/:id', async (request, reply) => {
+ try {
+ const { id } = request.params;
+ const workflowsRepo = workflowsRepository;
+
+ const workflowRecord = await workflowsRepo.findById(id);
+
+ if (!workflowRecord) {
+ return reply.code(404).send({
+ error: true,
+ message: `Workflow not found: ${id}`,
+ });
+ }
+
+ // Handle both string and object definitions
+ const workflow: Workflow =
+ typeof workflowRecord.definition === 'string'
+ ? JSON.parse(workflowRecord.definition)
+ : (workflowRecord.definition as Workflow);
+
+ return reply.send({
+ data: {
+ id: workflowRecord.id,
+ name: workflow.workflow.name,
+ description: workflow.workflow.description,
+ definition: workflow,
+ isDefault: workflowRecord.isDefault,
+ createdAt: workflowRecord.createdAt.toISOString(),
+ updatedAt: workflowRecord.updatedAt.toISOString(),
+ },
+ });
+ } catch (error) {
+ return reply.code(500).send({
+ error: true,
+ message: `Failed to get workflow: ${error instanceof Error ? error.message : 'Unknown error'}`,
+ });
+ }
+ });
+
+ fastify.post<{ Querystring: { projectId?: string } }>(
+ '/api/workflows',
+ async (request, reply) => {
+ try {
+ const { projectId } = request.query;
+ if (!projectId) {
+ return reply.code(400).send({
+ error: true,
+ message: 'projectId query parameter is required',
+ });
+ }
+
+ const body = CreateWorkflowDTOSchema.parse(request.body);
+
+ // Use version from body.definition if provided, otherwise default to 1
+ const workflowVersion = body.definition?.version ?? 1;
+
+ const validated = WorkflowSchema.safeParse({
+ version: workflowVersion,
+ workflow: {
+ ...body.definition,
+ name: body.name,
+ description: body.description,
+ },
+ });
+
+ if (!validated.success) {
+ return reply.code(400).send({
+ error: true,
+ message: 'Invalid workflow definition',
+ details: validated.error.errors,
+ });
+ }
+
+ const validation = workflowValidationService.validateWorkflow(validated.data);
+
+ if (!validation.valid) {
+ return reply.code(400).send({
+ error: true,
+ message: 'Workflow validation failed',
+ details: validation.errors,
+ });
+ }
+
+ const workflowsRepo = workflowsRepository;
+
+ // Verify project exists
+ const project = await projectsRepository.findById(projectId);
+ if (!project) {
+ return reply.code(404).send({
+ error: true,
+ message: `Project not found: ${projectId}`,
+ });
+ }
+
+ // If setting as default, unset other defaults for this project
+ if (body.isDefault) {
+ const existingDefaults = await workflowsRepo.findByProjectId(projectId);
+ for (const wf of existingDefaults) {
+ if (wf.isDefault) {
+ await workflowsRepo.update(wf.id, { isDefault: false });
+ }
+ }
+ }
+
+ const workflowRecord = await workflowsRepo.create({
+ id: crypto.randomUUID(),
+ projectId,
+ name: body.name,
+ definition: validated.data,
+ isDefault: body.isDefault ?? false,
+ });
+
+ // Handle both string and object definitions
+ const workflow: Workflow =
+ typeof workflowRecord.definition === 'string'
+ ? JSON.parse(workflowRecord.definition)
+ : (workflowRecord.definition as Workflow);
+
+ return reply.code(201).send({
+ data: {
+ id: workflowRecord.id,
+ name: workflowRecord.name,
+ description: workflow.workflow.description,
+ definition: validated.data,
+ isDefault: workflowRecord.isDefault,
+ createdAt: workflowRecord.createdAt.toISOString(),
+ updatedAt: workflowRecord.updatedAt.toISOString(),
+ },
+ });
+ } catch (error) {
+ return reply.code(500).send({
+ error: true,
+ message: `Failed to create workflow: ${error instanceof Error ? error.message : 'Unknown error'}`,
+ });
+ }
+ }
+ );
+
+ fastify.patch<{ Params: { id: string } }>('/api/workflows/:id', async (request, reply) => {
+ try {
+ const { id } = request.params;
+ const body = UpdateWorkflowDTOSchema.parse(request.body);
+
+ if (!body.definition) {
+ return reply.code(400).send({
+ error: true,
+ message: 'definition is required',
+ });
+ }
+
+ const workflowsRepo = workflowsRepository;
+ const existing = await workflowsRepo.findById(id);
+
+ if (!existing) {
+ return reply.code(404).send({
+ error: true,
+ message: `Workflow not found: ${id}`,
+ });
+ }
+
+ // Use version from body.definition if provided, otherwise use existing version
+ const existingWorkflow: Workflow =
+ typeof existing.definition === 'string'
+ ? JSON.parse(existing.definition)
+ : (existing.definition as Workflow);
+ const workflowVersion =
+ body.definition?.version ?? existingWorkflow.version ?? existing.version ?? 1;
+
+ const validated = WorkflowSchema.safeParse({
+ version: workflowVersion,
+ workflow: {
+ ...body.definition,
+ name: body.name ?? body.definition.name,
+ description: body.description ?? body.definition.description,
+ },
+ });
+
+ if (!validated.success) {
+ return reply.code(400).send({
+ error: true,
+ message: 'Invalid workflow definition',
+ details: validated.error.errors,
+ });
+ }
+
+ const validation = workflowValidationService.validateWorkflow(validated.data);
+
+ if (!validation.valid) {
+ return reply.code(400).send({
+ error: true,
+ message: 'Workflow validation failed',
+ details: validation.errors,
+ });
+ }
+
+ // Validate backbone modifications for immutable nodes
+ // existingWorkflow already parsed above
+ const backboneValidation = workflowValidationService.validateBackboneModification(
+ existingWorkflow,
+ validated.data
+ );
+
+ if (!backboneValidation.allowed) {
+ return reply.code(400).send({
+ error: true,
+ message: 'Backbone modification not allowed',
+ details: backboneValidation.errors,
+ });
+ }
+
+ // If setting as default, unset other defaults for this project
+ if (body.isDefault) {
+ const existingDefaults = await workflowsRepo.findByProjectId(existing.projectId);
+ for (const wf of existingDefaults) {
+ if (wf.isDefault && wf.id !== id) {
+ await workflowsRepo.update(wf.id, { isDefault: false });
+ }
+ }
+ }
+
+ const updated = await workflowsRepo.update(id, {
+ name: body.name,
+ definition: validated.data,
+ isDefault: body.isDefault,
+ });
+
+ if (!updated) {
+ return reply.code(404).send({
+ error: true,
+ message: `Workflow not found: ${id}`,
+ });
+ }
+
+ // Handle both string and object definitions
+ const workflow: Workflow =
+ typeof updated.definition === 'string'
+ ? JSON.parse(updated.definition)
+ : (updated.definition as Workflow);
+
+ return reply.send({
+ data: {
+ id: updated.id,
+ name: updated.name,
+ description: workflow.workflow.description,
+ definition: validated.data,
+ isDefault: updated.isDefault,
+ createdAt: updated.createdAt.toISOString(),
+ updatedAt: updated.updatedAt.toISOString(),
+ },
+ });
+ } catch (error) {
+ return reply.code(500).send({
+ error: true,
+ message: `Failed to update workflow: ${error instanceof Error ? error.message : 'Unknown error'}`,
+ });
+ }
+ });
+
+ fastify.delete<{ Params: { id: string } }>('/api/workflows/:id', async (request, reply) => {
+ try {
+ const { id } = request.params;
+ const workflowsRepo = workflowsRepository;
+ const existing = await workflowsRepo.findById(id);
+
+ if (!existing) {
+ return reply.code(404).send({
+ error: true,
+ message: `Workflow not found: ${id}`,
+ });
+ }
+
+ if (existing.isDefault) {
+ return reply.code(400).send({
+ error: true,
+ message: 'Cannot delete default workflow',
+ });
+ }
+
+ await workflowsRepo.delete(id);
+
+ return reply.code(204).send();
+ } catch (error) {
+ return reply.code(500).send({
+ error: true,
+ message: `Failed to delete workflow: ${error instanceof Error ? error.message : 'Unknown error'}`,
+ });
+ }
+ });
+
+ fastify.post<{ Params: { workflowId: string } }>(
+ '/api/workflows/:workflowId/execute',
+ async (request, reply) => {
+ try {
+ const { workflowId } = request.params;
+ const { workItemId } = ExecuteWorkflowDTOSchema.parse(request.body);
+
+ const workflowsRepo = workflowsRepository;
+ const workflowRecord = await workflowsRepo.findById(workflowId);
+
+ if (!workflowRecord) {
+ return reply.code(404).send({
+ error: true,
+ message: `Workflow not found: ${workflowId}`,
+ });
+ }
+
+ const workflowRun = await workflowExecutionService.execute(workflowId, workItemId);
+
+ return reply.send({
+ data: {
+ id: workflowRun.id,
+ workflowId: workflowRun.workflowId,
+ workItemId: workflowRun.workItemId,
+ status: workflowRun.status,
+ currentStepId: workflowRun.currentStepId,
+ startedAt: workflowRun.startedAt,
+ finishedAt: workflowRun.finishedAt,
+ createdAt: workflowRun.createdAt,
+ },
+ });
+ } catch (error) {
+ return reply.code(500).send({
+ error: true,
+ message: `Failed to execute workflow: ${error instanceof Error ? error.message : 'Unknown error'}`,
+ });
+ }
+ }
+ );
+
+ fastify.get<{ Params: { workflowId: string }; Querystring: { workItemId?: string } }>(
+ '/api/workflows/:workflowId/runs',
+ async (request, reply) => {
+ try {
+ const { workflowId } = request.params;
+ const { workItemId } = request.query;
+
+ const workflowsRepo = workflowsRepository;
+ const workflowRecord = await workflowsRepo.findById(workflowId);
+
+ if (!workflowRecord) {
+ return reply.code(404).send({
+ error: true,
+ message: `Workflow not found: ${workflowId}`,
+ });
+ }
+
+ const runs = await workflowsRepo.findAllRuns(workItemId, workflowId);
+
+ const runsData = runs.map((r) => ({
+ id: r.id,
+ workflowId: r.workflowId,
+ workItemId: r.workItemId,
+ status: r.status,
+ currentStepId: r.currentStepId,
+ startedAt: r.startedAt?.toISOString() ?? null,
+ finishedAt: r.finishedAt?.toISOString() ?? null,
+ createdAt: r.createdAt.toISOString(),
+ }));
+
+ return reply.send({
+ data: runsData,
+ });
+ } catch (error) {
+ return reply.code(500).send({
+ error: true,
+ message: `Failed to list workflow runs: ${error instanceof Error ? error.message : 'Unknown error'}`,
+ });
+ }
+ }
+ );
+
+ fastify.get<{ Params: { runId: string } }>(
+ '/api/workflow-runs/:runId',
+ async (request, reply) => {
+ try {
+ const { runId } = request.params;
+ const workflowsRepo = workflowsRepository;
+
+ const runRecord = await workflowsRepo.findRunById(runId);
+
+ if (!runRecord) {
+ return reply.code(404).send({
+ error: true,
+ message: `Run not found: ${runId}`,
+ });
+ }
+
+ const steps = await workflowsRepo.findNodeRunsByWorkflowRunId(runId);
+
+ const stepsData = steps.map((s: NodeRunRecord) => ({
+ id: s.id,
+ runId: s.runId,
+ nodeId: s.nodeId,
+ status: s.status,
+ startedAt: s.startedAt?.toISOString() ?? null,
+ finishedAt: s.finishedAt?.toISOString() ?? null,
+ error: s.error ?? null,
+ output: typeof s.output === 'string' ? JSON.parse(s.output) : s.output,
+ }));
+
+ return reply.send({
+ data: stepsData,
+ });
+ } catch (error) {
+ return reply.code(500).send({
+ error: true,
+ message: `Failed to get run details: ${error instanceof Error ? error.message : 'Unknown error'}`,
+ });
+ }
+ }
+ );
+
+ // Alias for frontend compatibility
+ fastify.get<{ Params: { runId: string } }>(
+ '/api/workflow-runs/:runId/steps',
+ async (request, reply) => {
+ try {
+ const { runId } = request.params;
+ const workflowsRepo = workflowsRepository;
+
+ const steps = await workflowsRepo.findNodeRunsByWorkflowRunId(runId);
+
+ const stepsData = steps.map((s: NodeRunRecord) => ({
+ id: s.id,
+ runId: s.runId,
+ nodeId: s.nodeId,
+ status: s.status,
+ startedAt: s.startedAt?.toISOString() ?? null,
+ finishedAt: s.finishedAt?.toISOString() ?? null,
+ error: s.error ?? null,
+ output: typeof s.output === 'string' ? JSON.parse(s.output) : s.output,
+ }));
+
+ return reply.send({
+ data: stepsData,
+ });
+ } catch (error) {
+ return reply.code(500).send({
+ error: true,
+ message: `Failed to get run steps: ${error instanceof Error ? error.message : 'Unknown error'}`,
+ });
+ }
+ }
+ );
+};
diff --git a/backend/src/routes/workitems.ts b/backend/src/routes/workitems.ts
index 942b7a9..815cb8e 100644
--- a/backend/src/routes/workitems.ts
+++ b/backend/src/routes/workitems.ts
@@ -1,16 +1,22 @@
import type { FastifyInstance } from 'fastify';
import { z } from 'zod';
import { v4 as uuidv4 } from 'uuid';
-import { CreateWorkItemDTOSchema, UpdateWorkItemDTOSchema } from 'git-vibe-shared';
+import {
+ CreateWorkItemDTOSchema,
+ UpdateWorkItemDTOSchema,
+ WORKITEM_STATUS_CLOSED,
+} from 'git-vibe-shared';
import { workItemsRepository } from '../repositories/WorkItemsRepository.js';
import { pullRequestsRepository } from '../repositories/PullRequestsRepository.js';
import { projectsRepository } from '../repositories/ProjectsRepository.js';
-import { agentService } from '../services/AgentService.js';
+import { agentService } from '../services/agent/AgentService.js';
import { workspaceService } from '../services/WorkspaceService.js';
import { prService } from '../services/PRService.js';
+import { workItemEventService } from '../services/WorkItemEventService.js';
import { toDTO as workItemToDTO } from '../mappers/workItems.js';
import { toDTO as pullRequestToDTO } from '../mappers/pullRequests.js';
import { toDTO as agentRunToDTO } from '../mappers/agentRuns.js';
+import { workflowEventBus } from '../services/workflow/WorkflowEventBus.js';
export async function workitemsRoutes(server: FastifyInstance) {
// POST /api/workitems - Create new WorkItem and automatically start agent
@@ -27,8 +33,8 @@ export async function workitemsRoutes(server: FastifyInstance) {
});
}
- // Create WorkItem in database
- const workItem = await workItemsRepository.create({
+ // Create WorkItem via event service (emits workitem.created event which triggers workflow)
+ const workItem = await workItemEventService.createWorkItem({
id: uuidv4(),
projectId: body.projectId,
type: body.type,
@@ -36,18 +42,8 @@ export async function workitemsRoutes(server: FastifyInstance) {
body: body.body,
});
- // Automatically execute task: initialize workspace and start agent
- // This runs asynchronously and doesn't block the response
- agentService
- .executeTask(workItem.projectId, workItem.id, workItem.title, workItem.body || undefined)
- .then(() => {
- console.log(`Task started successfully for work item ${workItem.id}`);
- })
- .catch((error) => {
- const errorMessage = error instanceof Error ? error.message : String(error);
- console.error(`Failed to execute task for work item ${workItem.id}:`, errorMessage);
- console.error('Full error details:', error);
- });
+ // Event service emits workitem.created event, which triggers workflow execution
+ // No need to call agentService.executeTask directly
return reply.status(201).send(workItemToDTO(workItem));
} catch (error) {
@@ -80,8 +76,8 @@ export async function workitemsRoutes(server: FastifyInstance) {
});
}
- // Create WorkItem in database
- const workItem = await workItemsRepository.create({
+ // Create WorkItem via event service (emits workitem.created event which triggers workflow)
+ const workItem = await workItemEventService.createWorkItem({
id: uuidv4(),
projectId,
type: body.type,
@@ -89,18 +85,8 @@ export async function workitemsRoutes(server: FastifyInstance) {
body: body.body,
});
- // Automatically execute task: initialize workspace and start agent
- // This runs asynchronously and doesn't block the response
- agentService
- .executeTask(workItem.projectId, workItem.id, workItem.title, workItem.body || undefined)
- .then(() => {
- console.log(`Task started successfully for work item ${workItem.id}`);
- })
- .catch((error) => {
- const errorMessage = error instanceof Error ? error.message : String(error);
- console.error(`Failed to execute task for work item ${workItem.id}:`, errorMessage);
- console.error('Full error details:', error);
- });
+ // Event service emits workitem.created event, which triggers workflow execution
+ // No need to call agentService.executeTask directly
return reply.status(201).send(workItemToDTO(workItem));
} catch (error) {
@@ -183,7 +169,19 @@ export async function workitemsRoutes(server: FastifyInstance) {
}
try {
- const updatedWorkItem = await workspaceService.initWorkspace(workItem, project);
+ // Initialize workspace (stateless - returns state)
+ const workspaceState = await workspaceService.initWorkspace(workItem.id, project);
+ // Update WorkItem state via event service (which emits events and triggers workflow)
+ const updatedWorkItem = await workItemEventService.updateWorkItemState(
+ workItem.id,
+ workspaceState
+ );
+ if (!updatedWorkItem) {
+ return reply.status(404).send({
+ error: true,
+ message: 'WorkItem not found after workspace initialization',
+ });
+ }
return reply.status(200).send(workItemToDTO(updatedWorkItem));
} catch (error) {
return reply.status(500).send({
@@ -217,10 +215,29 @@ export async function workitemsRoutes(server: FastifyInstance) {
}
// Find the most recent agent run for this WorkItem that has a sessionId
- const allAgentRuns = await agentService.getWorkItemTasks(request.params.id);
- const latestRunWithSession = allAgentRuns
- .filter((run) => run.sessionId)
- .sort((a, b) => new Date(b.createdAt).getTime() - new Date(a.createdAt).getTime())[0];
+ const allTasks = await agentService.getWorkItemTasks(request.params.id);
+ // Get agent runs for tasks that have them
+ const { agentRunsRepository } = await import('../repositories/AgentRunsRepository.js');
+ const tasksWithRuns = await Promise.all(
+ allTasks
+ .filter((t) => t.currentAgentRunId)
+ .map(async (t) => {
+ const run = await agentRunsRepository.findById(t.currentAgentRunId!);
+ return run ? { task: t, run } : null;
+ })
+ );
+ const latestRunWithSession = tasksWithRuns
+ .filter(
+ (
+ tr
+ ): tr is {
+ task: (typeof allTasks)[0];
+ run: NonNullable<(typeof tasksWithRuns)[0]>['run'];
+ } => tr !== null && tr.run.sessionId !== null
+ )
+ .sort(
+ (a, b) => new Date(b.run.createdAt).getTime() - new Date(a.run.createdAt).getTime()
+ )[0];
if (!latestRunWithSession) {
return reply.status(400).send({
@@ -229,8 +246,8 @@ export async function workitemsRoutes(server: FastifyInstance) {
});
}
- // Resume the task using the same session
- const agentRun = await agentService.resumeTask(latestRunWithSession.id, prompt);
+ // Resume the task using the same session; returns AgentRun
+ const agentRun = await agentService.resumeTask(latestRunWithSession.task.id, prompt);
return reply.status(201).send(agentRunToDTO(agentRun));
} catch (error) {
return reply.status(400).send({
@@ -254,7 +271,8 @@ export async function workitemsRoutes(server: FastifyInstance) {
});
}
- const updated = await workItemsRepository.update(request.params.id, body);
+ // Update WorkItem via event service (emits workitem.updated/status.changed events)
+ const updated = await workItemEventService.updateWorkItem(request.params.id, body);
if (!updated) {
return reply.status(404).send({
@@ -263,15 +281,8 @@ export async function workitemsRoutes(server: FastifyInstance) {
});
}
- // If WorkItem is being closed, clean up its worktree
- if (body.status === 'closed') {
- const project = await projectsRepository.findById(workItem.projectId);
- if (project) {
- workspaceService.removeWorktree(workItem, project).catch((error) => {
- console.error(`Failed to clean up worktree for WorkItem ${request.params.id}:`, error);
- });
- }
- }
+ // If WorkItem is being closed, workflow will handle cleanup via workitem.closed event
+ // No need to directly call workspaceService.removeWorktree
return reply.status(200).send(workItemToDTO(updated));
} catch (error) {
@@ -341,7 +352,21 @@ export async function workitemsRoutes(server: FastifyInstance) {
const updatedWorkItem = await workspaceService.ensureWorkspace(workItem, project);
// Create PR using PRService
- const pr = await prService.openPR(updatedWorkItem, project);
+ const pr = await prService.openPR(
+ updatedWorkItem.id,
+ project.id,
+ updatedWorkItem.title,
+ updatedWorkItem.body,
+ updatedWorkItem.headBranch || project.defaultBranch,
+ project.defaultBranch
+ );
+
+ if (!pr) {
+ return reply.status(400).send({
+ error: true,
+ message: 'No changes detected, cannot create PR',
+ });
+ }
return reply.status(201).send(pullRequestToDTO(pr));
} catch (error) {
@@ -367,7 +392,7 @@ export async function workitemsRoutes(server: FastifyInstance) {
return pr ? [pullRequestToDTO(pr)] : [];
});
- // GET /api/workitems/:id/tasks - Get all agent tasks for a WorkItem
+ // GET /api/workitems/:id/tasks - Get all tasks for a WorkItem
server.get<{ Params: { id: string } }>('/api/workitems/:id/tasks', async (request, reply) => {
const workItem = await workItemsRepository.findById(request.params.id);
if (!workItem) {
@@ -378,7 +403,54 @@ export async function workitemsRoutes(server: FastifyInstance) {
}
const tasks = await agentService.getWorkItemTasks(request.params.id);
- return tasks.map(agentRunToDTO);
+ const { agentRunsRepository } = await import('../repositories/AgentRunsRepository.js');
+ const result = await Promise.all(
+ tasks.map(async (task) => {
+ if (task.currentAgentRunId) {
+ const agentRun = await agentRunsRepository.findById(task.currentAgentRunId);
+ if (agentRun) {
+ return agentRunToDTO(agentRun);
+ }
+ }
+ // If no agent run, return a synthetic AgentRunDTO from task
+ return {
+ id: task.id,
+ projectId: workItem.projectId,
+ workItemId: task.workItemId,
+ taskId: task.id,
+ agentKey: 'opencode' as const,
+ status:
+ task.status === 'pending'
+ ? 'queued'
+ : task.status === 'running'
+ ? 'running'
+ : task.status === 'succeeded'
+ ? 'succeeded'
+ : task.status === 'failed'
+ ? 'failed'
+ : 'cancelled',
+ inputSummary: task.taskType,
+ inputJson: JSON.stringify({ taskType: task.taskType, ...task.input }),
+ sessionId: null,
+ linkedAgentRunId: null,
+ log: null,
+ logPath: null,
+ stdoutPath: null,
+ stderrPath: null,
+ headShaBefore: null,
+ headShaAfter: null,
+ commitSha: null,
+ pid: null,
+ idempotencyKey: task.idempotencyKey,
+ nodeRunId: task.nodeRunId ?? null,
+ startedAt: null,
+ finishedAt: null,
+ createdAt: task.createdAt.toISOString(),
+ updatedAt: task.updatedAt.toISOString(),
+ };
+ })
+ );
+ return result;
});
// POST /api/workitems/:id/tasks/:taskId/cancel - Cancel a running task
@@ -421,8 +493,20 @@ export async function workitemsRoutes(server: FastifyInstance) {
}
try {
- const agentRun = await agentService.restartTask(request.params.taskId);
- return reply.status(201).send(agentRunToDTO(agentRun));
+ const task = await agentService.restartTask(request.params.taskId);
+ if (task.currentAgentRunId) {
+ const { agentRunsRepository } = await import('../repositories/AgentRunsRepository.js');
+ const agentRun = await agentRunsRepository.findById(task.currentAgentRunId);
+ if (agentRun) {
+ return reply.status(201).send(agentRunToDTO(agentRun));
+ }
+ }
+ // Return task info if no agent run
+ return reply.status(201).send({
+ id: task.id,
+ taskType: task.taskType,
+ status: task.status,
+ });
} catch (error) {
return reply.status(404).send({
error: true,
@@ -445,8 +529,18 @@ export async function workitemsRoutes(server: FastifyInstance) {
}
try {
- const status = await agentService.getTaskStatus(request.params.taskId);
- return status;
+ const statusResult = await agentService.getTaskStatus(request.params.taskId);
+ if (statusResult.agentRun) {
+ return {
+ status: statusResult.status,
+ agentRun: agentRunToDTO(statusResult.agentRun),
+ };
+ }
+ // Return task status if no agent run
+ return {
+ status: statusResult.status,
+ task: statusResult.task,
+ };
} catch (error) {
return reply.status(404).send({
error: true,
@@ -501,7 +595,7 @@ export async function workitemsRoutes(server: FastifyInstance) {
}
// Check if WorkItem is already closed
- if (workItem.status === 'closed') {
+ if (workItem.status === WORKITEM_STATUS_CLOSED) {
return reply.status(400).send({
error: true,
message: 'Cannot start task for a closed WorkItem',
@@ -522,16 +616,29 @@ export async function workitemsRoutes(server: FastifyInstance) {
// Get user message from request body if provided (for conversation messages)
const userMessage = request.body?.message;
- // Execute task: initialize workspace and start agent
- const result = await agentService.executeTask(
- workItem.projectId,
- workItem.id,
- workItem.title,
- workItem.body || undefined,
- userMessage
- );
+ // Emit workitem.task.start event to trigger workflow
+ // The workflow will handle starting the agent run via AgentNodeExecutor
+ console.log(`[workitemsRoutes] Emitting workitem.task.start event for ${workItem.id}`);
+
+ await workflowEventBus.emit({
+ eventId: crypto.randomUUID(),
+ at: new Date().toISOString(),
+ subject: { kind: 'workitem', id: workItem.id },
+ type: 'workitem.task.start',
+ workItemId: workItem.id,
+ data: {
+ title: workItem.title,
+ body: workItem.body ?? '',
+ userMessage,
+ },
+ });
- return reply.status(201).send(agentRunToDTO(result.agentRun));
+ // Return a placeholder response - the actual agent run will be created by the workflow
+ // The client should poll for tasks or use SSE to get the actual agent run
+ return reply.status(202).send({
+ message: 'Task start request accepted. Workflow will handle agent execution.',
+ workItemId: workItem.id,
+ });
} catch (error) {
return reply.status(400).send({
error: true,
@@ -561,8 +668,17 @@ export async function workitemsRoutes(server: FastifyInstance) {
try {
// Refresh head_sha using workspace service
- const updatedWorkItem = await workspaceService.refreshHeadSha(workItem);
- return reply.status(200).send(workItemToDTO(updatedWorkItem));
+ if (!workItem.worktreePath) {
+ return reply.status(400).send({
+ error: true,
+ message: 'WorkItem has no worktree initialized',
+ });
+ }
+ const headSha = await workspaceService.refreshHeadSha(workItem.worktreePath);
+ const updatedWorkItem = await workItemsRepository.update(workItem.id, {
+ headSha,
+ });
+ return reply.status(200).send(workItemToDTO(updatedWorkItem!));
} catch (error) {
return reply.status(500).send({
error: true,
diff --git a/backend/src/server.ts b/backend/src/server.ts
index 5240b02..a0917e4 100644
--- a/backend/src/server.ts
+++ b/backend/src/server.ts
@@ -1,40 +1,288 @@
import { createServer } from './middleware/setup.js';
import { getDb } from './db/client.js';
import { projectsRoutes } from './routes/projects.js';
-import { targetReposRoutes } from './routes/targetRepos.js';
import { pullRequestsRoutes } from './routes/pullRequests.js';
import { agentRunsRoutes } from './routes/agentRuns.js';
import { reviewRoutes } from './routes/reviews.js';
import { workitemsRoutes } from './routes/workitems.js';
+import { workflowRoutes } from './routes/workflows.js';
+import { searchRoutes } from './routes/search.js';
+import { settingsRoutes } from './routes/settings.js';
import { runMigrations } from './db/migrations.js';
import { modelsCache } from './services/ModelsCache.js';
+import { projectsRepository } from './repositories/ProjectsRepository.js';
+import { workflowsRepository } from './repositories/WorkflowsRepository.js';
+import {
+ createDefaultWorkflow,
+ getDefaultWorkflowVersion,
+ getWorkflowVersion,
+} from './services/workflow/defaultWorkflow.js';
+import { workflowExecutionService } from './services/workflow/WorkflowExecutionService.js';
+import { agentRunRecoveryService } from './services/agent/AgentRunRecoveryService.js';
+
+// Import workflowExecutionService early to ensure event handlers are registered
+import './services/workflow/WorkflowExecutionService.js';
const PORT = parseInt(process.env.PORT || '11031', 10);
const HOST = process.env.HOST || '127.0.0.1';
+/**
+ * Scans all projects and creates/upgrades default workflows
+ * - Creates default workflow if missing
+ * - Upgrades default workflow if version is outdated
+ *
+ * To update the default workflow, simply increment workflowVersion in createDefaultWorkflow()
+ */
+async function ensureDefaultWorkflows() {
+ try {
+ const projects = await projectsRepository.findAll();
+ const CURRENT_WORKFLOW_VERSION = getDefaultWorkflowVersion();
+
+ for (const project of projects) {
+ const currentWorkflowDefinition = createDefaultWorkflow(project.id);
+ const defaultWorkflow = await workflowsRepository.findDefault(project.id);
+
+ if (!defaultWorkflow) {
+ // No default workflow exists, check if a workflow with the same name exists
+ const existingWithSameName = await workflowsRepository.findByName(
+ currentWorkflowDefinition.workflow.name,
+ project.id
+ );
+
+ if (existingWithSameName) {
+ // Workflow with same name exists but isn't default, update it to be default
+ await workflowsRepository.update(existingWithSameName.id, {
+ name: currentWorkflowDefinition.workflow.name,
+ definition: currentWorkflowDefinition,
+ isDefault: true,
+ version: CURRENT_WORKFLOW_VERSION,
+ });
+
+ console.log(
+ `Updated existing workflow ${existingWithSameName.id} to default v${CURRENT_WORKFLOW_VERSION} for project: ${project.name} (${project.id})`
+ );
+ } else {
+ // No workflow with this name exists, create it
+ try {
+ await workflowsRepository.create({
+ id: currentWorkflowDefinition.workflow.id,
+ projectId: project.id,
+ name: currentWorkflowDefinition.workflow.name,
+ definition: currentWorkflowDefinition,
+ isDefault: true,
+ version: CURRENT_WORKFLOW_VERSION,
+ });
+
+ console.log(
+ `Created default workflow v${CURRENT_WORKFLOW_VERSION} for project: ${project.name} (${project.id})`
+ );
+ } catch (error: any) {
+ // If creation fails due to unique constraint, update existing workflow instead
+ if (error?.code === 'SQLITE_CONSTRAINT_UNIQUE' || error?.message?.includes('UNIQUE')) {
+ const existingWithSameName = await workflowsRepository.findByName(
+ currentWorkflowDefinition.workflow.name,
+ project.id
+ );
+
+ if (existingWithSameName) {
+ await workflowsRepository.update(existingWithSameName.id, {
+ name: currentWorkflowDefinition.workflow.name,
+ definition: currentWorkflowDefinition,
+ isDefault: true,
+ version: CURRENT_WORKFLOW_VERSION,
+ });
+
+ console.log(
+ `Updated existing workflow ${existingWithSameName.id} to default v${CURRENT_WORKFLOW_VERSION} for project: ${project.name} (${project.id}) due to constraint`
+ );
+ } else {
+ throw error; // Re-throw if we can't handle it
+ }
+ } else {
+ throw error; // Re-throw non-constraint errors
+ }
+ }
+ }
+ } else {
+ // Default workflow exists, check version
+ // Get version from database column first, fallback to definition
+ const dbVersion =
+ defaultWorkflow.version || getWorkflowVersion(defaultWorkflow.definition) || 1;
+ // Also check the definition's version to catch cases where column is outdated
+ const definitionVersion = getWorkflowVersion(defaultWorkflow.definition);
+ const needsUpdate =
+ dbVersion < CURRENT_WORKFLOW_VERSION || definitionVersion < CURRENT_WORKFLOW_VERSION;
+
+ if (needsUpdate) {
+ // Version is outdated, update existing workflow
+ const oldId = defaultWorkflow.id;
+ const newId = currentWorkflowDefinition.workflow.id;
+
+ // If ID changed (due to version change), handle migration carefully
+ if (oldId !== newId) {
+ // Check if workflow with new ID already exists
+ const existingWithNewId = await workflowsRepository.findById(newId);
+
+ if (existingWithNewId) {
+ // New ID already exists, update it
+ await workflowsRepository.update(newId, {
+ name: currentWorkflowDefinition.workflow.name,
+ definition: currentWorkflowDefinition,
+ version: CURRENT_WORKFLOW_VERSION,
+ isDefault: true,
+ });
+
+ // Preserve old workflow for traceability: mark it as non-default instead of deleting
+ if (oldId !== newId) {
+ await workflowsRepository.update(oldId, {
+ isDefault: false,
+ });
+ }
+ } else {
+ // Check if there's already a workflow with the same name for this project
+ // (to avoid unique constraint violation on project_id + name)
+ const existingWithSameName = await workflowsRepository.findByName(
+ currentWorkflowDefinition.workflow.name,
+ project.id
+ );
+
+ if (existingWithSameName && existingWithSameName.id !== oldId) {
+ // Update the existing workflow with same name instead of creating new
+ await workflowsRepository.update(existingWithSameName.id, {
+ name: currentWorkflowDefinition.workflow.name,
+ definition: currentWorkflowDefinition,
+ version: CURRENT_WORKFLOW_VERSION,
+ isDefault: true,
+ });
+
+ // Preserve old workflow: mark it as non-default instead of deleting
+ await workflowsRepository.update(oldId, {
+ isDefault: false,
+ });
+ } else {
+ // Safe to create new default workflow while preserving the old one
+ // First mark old workflow as non-default
+ await workflowsRepository.update(oldId, {
+ isDefault: false,
+ });
+
+ try {
+ await workflowsRepository.create({
+ id: newId,
+ projectId: project.id,
+ name: currentWorkflowDefinition.workflow.name,
+ definition: currentWorkflowDefinition,
+ isDefault: true,
+ version: CURRENT_WORKFLOW_VERSION,
+ });
+ } catch (error: any) {
+ // If creation fails due to unique constraint, update existing workflow instead
+ if (
+ error?.code === 'SQLITE_CONSTRAINT_UNIQUE' ||
+ error?.message?.includes('UNIQUE')
+ ) {
+ const existingWithSameName = await workflowsRepository.findByName(
+ currentWorkflowDefinition.workflow.name,
+ project.id
+ );
+
+ if (existingWithSameName) {
+ await workflowsRepository.update(existingWithSameName.id, {
+ name: currentWorkflowDefinition.workflow.name,
+ definition: currentWorkflowDefinition,
+ version: CURRENT_WORKFLOW_VERSION,
+ isDefault: true,
+ });
+
+ // Preserve old workflow if it's a different record by marking it non-default
+ if (existingWithSameName.id !== oldId) {
+ await workflowsRepository.update(oldId, {
+ isDefault: false,
+ });
+ }
+ } else {
+ throw error; // Re-throw if we can't handle it
+ }
+ } else {
+ throw error; // Re-throw non-constraint errors
+ }
+ }
+ }
+ }
+ } else {
+ // Same ID, just update the definition
+ await workflowsRepository.update(oldId, {
+ name: currentWorkflowDefinition.workflow.name,
+ definition: currentWorkflowDefinition,
+ version: CURRENT_WORKFLOW_VERSION,
+ isDefault: true,
+ });
+ }
+ } else if (dbVersion === CURRENT_WORKFLOW_VERSION) {
+ // Version matches, but ensure definition is up-to-date (in case of hotfixes)
+ const existingDefinition =
+ typeof defaultWorkflow.definition === 'string'
+ ? JSON.parse(defaultWorkflow.definition)
+ : defaultWorkflow.definition;
+
+ // Compare workflow IDs to detect changes
+ if (existingDefinition.workflow.id !== currentWorkflowDefinition.workflow.id) {
+ // Workflow ID changed, update it
+ await workflowsRepository.update(defaultWorkflow.id, {
+ definition: currentWorkflowDefinition,
+ version: CURRENT_WORKFLOW_VERSION,
+ });
+
+ console.log(
+ `Updated default workflow definition for project: ${project.name} (${project.id}) to match v${CURRENT_WORKFLOW_VERSION}`
+ );
+ }
+ }
+ }
+ }
+ } catch (error) {
+ console.error('Failed to ensure default workflows:', error);
+ // Don't throw - allow server to start even if this fails
+ }
+}
+
async function start() {
const server = await createServer();
// Run database migrations on startup
await runMigrations();
- // Initialize models cache in the background
+ // Ensure all projects have default workflows
+ await ensureDefaultWorkflows();
+
+ // Initialize models cache in background
// This runs asynchronously and doesn't block server startup
// Initialize cache for both available agents
void modelsCache.initialize('opencode');
void modelsCache.initialize('claudecode');
+ // Recover interrupted workflow runs on startup
+ void workflowExecutionService.recoverInterruptedRuns();
+
+ // Recover interrupted agent runs on startup
+ void agentRunRecoveryService.recoverInterruptedRuns();
+
+ // Start event outbox processor
+ console.log('[Server] Event outbox processor started');
+
server.get('/health', async () => {
await getDb();
return { status: 'ok', timestamp: new Date().toISOString() };
});
await server.register(projectsRoutes);
- await server.register(targetReposRoutes);
await server.register(pullRequestsRoutes);
await server.register(agentRunsRoutes);
await server.register(reviewRoutes);
await server.register(workitemsRoutes);
+ await server.register(workflowRoutes);
+ await server.register(searchRoutes);
+ await server.register(settingsRoutes);
try {
await server.listen({ port: PORT, host: HOST });
diff --git a/backend/src/services/DomainDispatcher.ts b/backend/src/services/DomainDispatcher.ts
new file mode 100644
index 0000000..e3f8cd7
--- /dev/null
+++ b/backend/src/services/DomainDispatcher.ts
@@ -0,0 +1,421 @@
+/**
+ * DomainDispatcher - Dispatcher for Domain resources (system-internal concepts)
+ *
+ * Domain Resources:
+ * - WorkItem: create/update workitem fields
+ * - Task: create/start/complete tasks; emits domain events (task.created/task.started/task.completed)
+ * - PullRequest: create/update PR record
+ *
+ * Domain resource semantics:
+ * - resource.result.status == succeeded means the requested state transition completed
+ * - No long-running external execution implied
+ * - State transitions are synchronous within the workflow
+ */
+
+import type { WorkItem, NodeRun } from '../types/models';
+import type { ResourceType } from 'git-vibe-shared';
+import {
+ RESOURCE_STATUS_SUCCEEDED,
+ RESOURCE_STATUS_FAILED,
+ RESOURCE_STATUS_CANCELED,
+ PR_STATUS_MERGED,
+} from 'git-vibe-shared';
+import { workItemsRepository } from '../repositories/WorkItemsRepository';
+import { agentRunsRepository } from '../repositories/AgentRunsRepository';
+import { pullRequestsRepository } from '../repositories/PullRequestsRepository';
+import { tasksRepository } from '../repositories/TasksRepository';
+import { prService } from './PRService';
+import * as crypto from 'node:crypto';
+import type { ResourceHandlerContext } from './ResourceDispatcher.js';
+
+export interface ResourceResult {
+ resourceType: ResourceType;
+ resourceId: string;
+ status:
+ | typeof RESOURCE_STATUS_SUCCEEDED
+ | typeof RESOURCE_STATUS_FAILED
+ | typeof RESOURCE_STATUS_CANCELED;
+ summary: string;
+ outputs: Record;
+}
+
+export interface ResourceHandler {
+ canHandle(resourceType: ResourceType): boolean;
+ execute(context: ResourceHandlerContext): Promise;
+}
+
+class WorkItemResourceHandler implements ResourceHandler {
+ canHandle(resourceType: ResourceType): boolean {
+ return resourceType === 'WorkItem';
+ }
+
+ async execute(context: ResourceHandlerContext): Promise {
+ const { workItem, input, nodeRun } = context;
+
+ if (input.ensureTasks && Array.isArray(input.ensureTasks)) {
+ const taskHandler = new TaskResourceHandler();
+ const createdTaskIds: string[] = [];
+ const existingTaskIds: string[] = [];
+ const autoStartTaskIds: string[] = [];
+
+ for (const taskSpec of input.ensureTasks) {
+ const existingTask = await tasksRepository.findByTaskType(workItem.id, taskSpec.taskType);
+
+ if (existingTask) {
+ existingTaskIds.push(existingTask.id);
+ createdTaskIds.push(existingTask.id);
+ console.log(
+ `[WorkItemResourceHandler] Task ${taskSpec.taskType} already exists: ${existingTask.id}, status: ${existingTask.status}`
+ );
+
+ if (existingTask.status === 'pending' && taskSpec.autoStart) {
+ autoStartTaskIds.push(existingTask.id);
+ }
+ } else {
+ const taskInput = {
+ id: crypto.randomUUID(),
+ taskType: taskSpec.taskType,
+ status: 'pending',
+ input: taskSpec.input || {},
+ idempotencyKey: `workitem:${workItem.id}:task:${taskSpec.taskType}:create`,
+ };
+
+ const taskResult = await taskHandler.execute({
+ workItem,
+ nodeRun,
+ input: taskInput,
+ });
+
+ createdTaskIds.push(taskResult.resourceId);
+ }
+ }
+
+ // Return task IDs in outputs for event emission by nodes
+ return {
+ resourceType: 'WorkItem',
+ resourceId: workItem.id,
+ status: RESOURCE_STATUS_SUCCEEDED,
+ summary: `WorkItem ${workItem.id} processed ${createdTaskIds.length} tasks`,
+ outputs: {
+ createdTaskIds,
+ existingTaskIds,
+ autoStartTaskIds,
+ },
+ };
+ }
+
+ if (input.ensurePRRequest) {
+ const existingPR = await pullRequestsRepository.findByWorkItemId(workItem.id);
+ if (!existingPR && workItem.headBranch && workItem.baseBranch) {
+ const pr = await prService.openPR(
+ workItem.id,
+ workItem.projectId,
+ workItem.title,
+ workItem.body,
+ workItem.headBranch,
+ workItem.baseBranch
+ );
+
+ if (pr) {
+ console.log(`[WorkItemResourceHandler] Created PR request ${pr.id}`);
+ } else {
+ console.log(`[WorkItemResourceHandler] No changes detected, skipping PR creation`);
+ }
+ }
+ }
+
+ const updateData = Object.fromEntries(
+ Object.entries(input).filter(
+ ([key, value]) =>
+ value !== undefined &&
+ value !== null &&
+ key !== 'ensureTasks' &&
+ key !== 'ensurePRRequest'
+ )
+ );
+
+ if (Object.keys(updateData).length > 0) {
+ await workItemsRepository.update(workItem.id, updateData);
+ }
+
+ return {
+ resourceType: 'WorkItem',
+ resourceId: workItem.id,
+ status: RESOURCE_STATUS_SUCCEEDED,
+ summary:
+ Object.keys(updateData).length > 0
+ ? `WorkItem ${workItem.id} updated`
+ : `WorkItem ${workItem.id} (no changes)`,
+ outputs: {},
+ };
+ }
+}
+
+class TaskResourceHandler implements ResourceHandler {
+ canHandle(resourceType: ResourceType): boolean {
+ return resourceType === 'Task';
+ }
+
+ async execute(context: ResourceHandlerContext): Promise {
+ const { workItem, input, nodeRun } = context;
+
+ if (input.taskId) {
+ const taskId = input.taskId as string;
+ const existingTask = await tasksRepository.findById(taskId);
+
+ if (!existingTask) {
+ throw new Error(`Task ${taskId} not found`);
+ }
+
+ if (input.patch) {
+ const patch = input.patch as Record;
+ const updates: Partial<{
+ status: 'pending' | 'running' | 'succeeded' | 'failed' | 'canceled' | 'blocked';
+ output: Record;
+ currentAgentRunId: string | null;
+ }> = {};
+
+ if (patch.status) {
+ updates.status = patch.status as any;
+ }
+ if (patch.output) {
+ updates.output = patch.output as Record;
+ }
+ if (patch.currentAgentRunId !== undefined) {
+ updates.currentAgentRunId = patch.currentAgentRunId as string | null;
+ }
+
+ const updatedTask = await tasksRepository.update(taskId, updates);
+
+ return {
+ resourceType: 'Task',
+ resourceId: taskId,
+ status: RESOURCE_STATUS_SUCCEEDED,
+ summary: `Task ${taskId} updated`,
+ outputs: {
+ taskId: taskId,
+ status: updatedTask?.status,
+ },
+ };
+ }
+
+ if (input.completeFromAgentRunId) {
+ const agentRunId = input.completeFromAgentRunId as string;
+ const agentRun = await agentRunsRepository.findById(agentRunId);
+
+ if (!agentRun) {
+ throw new Error(`AgentRun ${agentRunId} not found`);
+ }
+
+ const taskStatus =
+ agentRun.status === 'succeeded'
+ ? 'succeeded'
+ : agentRun.status === 'failed' || agentRun.status === 'cancelled'
+ ? 'failed'
+ : existingTask.status;
+
+ await tasksRepository.update(taskId, {
+ status: taskStatus,
+ currentAgentRunId: agentRunId,
+ output: {
+ agentRunId: agentRunId,
+ agentRunStatus: agentRun.status,
+ },
+ });
+
+ return {
+ resourceType: 'Task',
+ resourceId: taskId,
+ status: RESOURCE_STATUS_SUCCEEDED,
+ summary: `Task ${taskId} completed from AgentRun ${agentRunId}`,
+ outputs: {
+ taskId: taskId,
+ status: taskStatus,
+ agentRunId: agentRunId,
+ },
+ };
+ }
+ }
+
+ const taskId = input.id || crypto.randomUUID();
+ const taskType = input.taskType as string;
+ const idempotencyKey = input.idempotencyKey as string | undefined;
+
+ if (idempotencyKey) {
+ const existing = await tasksRepository.findByIdempotencyKey(idempotencyKey);
+ if (existing) {
+ return {
+ resourceType: 'Task',
+ resourceId: existing.id,
+ status: RESOURCE_STATUS_SUCCEEDED,
+ summary: `Task ${existing.id} already exists (idempotent)`,
+ outputs: {
+ taskId: existing.id,
+ status: existing.status,
+ autoStart: false,
+ },
+ };
+ }
+ }
+
+ const existingTask = await tasksRepository.findByTaskType(workItem.id, taskType);
+ if (existingTask && existingTask.status !== 'succeeded' && existingTask.status !== 'failed') {
+ return {
+ resourceType: 'Task',
+ resourceId: existingTask.id,
+ status: RESOURCE_STATUS_SUCCEEDED,
+ summary: `Task ${existingTask.id} already exists`,
+ outputs: {
+ taskId: existingTask.id,
+ status: existingTask.status,
+ autoStart: false,
+ },
+ };
+ }
+
+ const task = await tasksRepository.create({
+ id: taskId,
+ workItemId: workItem.id,
+ taskType: taskType,
+ status: input.status || 'pending',
+ input: input.input || {},
+ output: input.output || {},
+ idempotencyKey: idempotencyKey || null,
+ nodeRunId: nodeRun.runId,
+ });
+
+ return {
+ resourceType: 'Task',
+ resourceId: task.id,
+ status: RESOURCE_STATUS_SUCCEEDED,
+ summary: `Task ${task.id} created`,
+ outputs: {
+ taskId: task.id,
+ status: task.status,
+ autoStart: input.autoStart || false,
+ },
+ };
+ }
+}
+
+class PullRequestResourceHandler implements ResourceHandler {
+ canHandle(resourceType: ResourceType): boolean {
+ return resourceType === 'PullRequest';
+ }
+
+ async execute(context: ResourceHandlerContext): Promise {
+ const { workItem, input } = context;
+
+ if (input.operation === 'merge') {
+ const pr = await pullRequestsRepository.findByWorkItemId(workItem.id);
+ if (!pr) {
+ throw new Error(`No PR found for WorkItem ${workItem.id}`);
+ }
+
+ const { projectsRepository } = await import('../repositories/ProjectsRepository');
+ const project = await projectsRepository.findById(workItem.projectId);
+ if (!project) {
+ throw new Error(`Project ${workItem.projectId} not found`);
+ }
+
+ const strategy = (input.strategy as 'merge' | 'squash' | 'rebase') || 'squash';
+ const mergedPR = await prService.mergePR(pr, workItem, project, strategy);
+
+ return {
+ resourceType: 'PullRequest',
+ resourceId: mergedPR.id,
+ status: RESOURCE_STATUS_SUCCEEDED,
+ summary: `PullRequest ${mergedPR.id} merged using ${strategy} strategy`,
+ outputs: {
+ prId: mergedPR.id,
+ prNumber: mergedPR.id,
+ merged: true,
+ mergeCommitSha: mergedPR.mergeCommitSha,
+ },
+ };
+ }
+
+ if (!workItem.headBranch) {
+ throw new Error(`WorkItem ${workItem.id} has no head branch`);
+ }
+
+ const title = input.titleFrom ? workItem.title : input.title || workItem.title;
+ const description =
+ input.bodyFrom || input.descriptionFrom
+ ? workItem.body
+ : input.description || workItem.body || undefined;
+
+ const headBranch = input.head || workItem.headBranch;
+ const baseBranch = input.base || workItem.baseBranch;
+
+ const pr = await prService.openPR(
+ workItem.id,
+ workItem.projectId,
+ title,
+ description,
+ headBranch,
+ baseBranch
+ );
+
+ if (!pr) {
+ return {
+ resourceType: 'PullRequest',
+ resourceId: workItem.id,
+ status: RESOURCE_STATUS_FAILED,
+ summary: 'No changes detected, skipping PR creation',
+ outputs: {
+ skipped: true,
+ reason: 'no_diff',
+ },
+ };
+ }
+
+ return {
+ resourceType: 'PullRequest',
+ resourceId: pr.id,
+ status: RESOURCE_STATUS_SUCCEEDED,
+ summary: `PullRequest ${pr.id} opened`,
+ outputs: {
+ prId: pr.id,
+ prNumber: pr.id,
+ url: '',
+ merged: pr.status === PR_STATUS_MERGED,
+ },
+ };
+ }
+}
+
+export class DomainDispatcher {
+ private handlers: Map;
+
+ constructor() {
+ this.handlers = new Map();
+ this.registerHandlers();
+ }
+
+ private registerHandlers(): void {
+ this.handlers.set('WorkItem', new WorkItemResourceHandler());
+ this.handlers.set('Task', new TaskResourceHandler());
+ this.handlers.set('PullRequest', new PullRequestResourceHandler());
+ }
+
+ async call(
+ resourceType: ResourceType,
+ _input: Record,
+ context: ResourceHandlerContext
+ ): Promise {
+ const handler = this.handlers.get(resourceType);
+ if (!handler) {
+ throw new Error(`No Domain handler for resource type: ${resourceType}`);
+ }
+
+ return handler.execute(context);
+ }
+
+ canHandle(resourceType: ResourceType): boolean {
+ const domainResources: ResourceType[] = ['WorkItem', 'Task', 'PullRequest'];
+ return domainResources.includes(resourceType);
+ }
+}
+
+export const domainDispatcher = new DomainDispatcher();
diff --git a/backend/src/services/EventOutbox.ts b/backend/src/services/EventOutbox.ts
new file mode 100644
index 0000000..f379d8f
--- /dev/null
+++ b/backend/src/services/EventOutbox.ts
@@ -0,0 +1,143 @@
+/**
+ * EventOutbox - Transactional outbox pattern for reliable event delivery
+ *
+ * Ensures events are written in the same DB transaction as resource updates,
+ * then dispatched asynchronously to the event bus.
+ */
+
+import type { WorkflowEvent } from 'git-vibe-shared';
+import { workflowEventBus } from './workflow/WorkflowEventBus.js';
+import { getDb } from '../db/client.js';
+import { eq, isNull, asc } from 'drizzle-orm';
+import { eventOutbox } from '../models/schema.js';
+import { v4 as uuidv4 } from 'uuid';
+
+/**
+ * EventOutboxService - Manages transactional outbox
+ */
+export class EventOutboxService {
+ private processingInterval: NodeJS.Timeout | null = null;
+ private isProcessing = false;
+
+ constructor() {
+ // Start background processor
+ this.startProcessor();
+ }
+
+ /**
+ * Add event to outbox (should be called within a transaction)
+ */
+ async addEvent(event: WorkflowEvent): Promise {
+ const db = await getDb();
+
+ await db.insert(eventOutbox).values({
+ id: uuidv4(),
+ eventId: event.eventId,
+ eventType: event.type,
+ eventData: JSON.stringify(event.data),
+ subjectKind: event.subject.kind,
+ subjectId: event.subject.id,
+ resourceVersion: event.resourceVersion ?? null,
+ causedBy: event.causedBy ? JSON.stringify(event.causedBy) : null,
+ createdAt: new Date(),
+ retryCount: 0,
+ });
+ }
+
+ /**
+ * Process outbox events and dispatch to event bus
+ */
+ async processOutbox(): Promise {
+ if (this.isProcessing) {
+ return;
+ }
+
+ this.isProcessing = true;
+
+ try {
+ const db = await getDb();
+
+ // Fetch unprocessed events (limit to avoid overwhelming)
+ const events = await db
+ .select()
+ .from(eventOutbox)
+ .where(isNull(eventOutbox.processedAt))
+ .orderBy(asc(eventOutbox.createdAt))
+ .limit(100);
+
+ for (const outboxEvent of events) {
+ try {
+ // Reconstruct event
+ const event: WorkflowEvent = {
+ eventId: outboxEvent.eventId,
+ type: outboxEvent.eventType,
+ at: outboxEvent.createdAt.toISOString(),
+ subject: {
+ kind: outboxEvent.subjectKind as any,
+ id: outboxEvent.subjectId,
+ },
+ resourceVersion: outboxEvent.resourceVersion ?? undefined,
+ causedBy: outboxEvent.causedBy ? JSON.parse(outboxEvent.causedBy) : undefined,
+ data: JSON.parse(outboxEvent.eventData),
+ };
+
+ // Dispatch to event bus
+ await workflowEventBus.emit(event);
+
+ // Mark as processed
+ await db
+ .update(eventOutbox)
+ .set({ processedAt: new Date() })
+ .where(eq(eventOutbox.id, outboxEvent.id));
+ } catch (error) {
+ console.error(`[EventOutbox] Error processing event ${outboxEvent.id}:`, error);
+
+ // Increment retry count
+ await db
+ .update(eventOutbox)
+ .set({ retryCount: outboxEvent.retryCount + 1 })
+ .where(eq(eventOutbox.id, outboxEvent.id));
+
+ // If retry count exceeds threshold, mark as failed
+ if (outboxEvent.retryCount >= 10) {
+ await db
+ .update(eventOutbox)
+ .set({ processedAt: new Date() })
+ .where(eq(eventOutbox.id, outboxEvent.id));
+ console.error(
+ `[EventOutbox] Event ${outboxEvent.id} exceeded retry limit, marking as failed`
+ );
+ }
+ }
+ }
+ } catch (error) {
+ console.error('[EventOutbox] Error processing outbox:', error);
+ } finally {
+ this.isProcessing = false;
+ }
+ }
+
+ /**
+ * Start background processor
+ */
+ private startProcessor(): void {
+ // Process every 1 second
+ this.processingInterval = setInterval(() => {
+ this.processOutbox().catch((error) => {
+ console.error('[EventOutbox] Error in background processor:', error);
+ });
+ }, 1000);
+ }
+
+ /**
+ * Stop background processor
+ */
+ stopProcessor(): void {
+ if (this.processingInterval) {
+ clearInterval(this.processingInterval);
+ this.processingInterval = null;
+ }
+ }
+}
+
+export const eventOutboxService = new EventOutboxService();
diff --git a/backend/src/services/GitRelayService.ts b/backend/src/services/GitRelayService.ts
deleted file mode 100644
index 194679a..0000000
--- a/backend/src/services/GitRelayService.ts
+++ /dev/null
@@ -1,94 +0,0 @@
-import fs from 'node:fs/promises';
-import path from 'node:path';
-
-/**
- * Service for Git relay repository operations
- */
-export class GitRelayService {
- constructor(
- private execCommand: (command: string, cwd: string) => string,
- private getDefaultBranch: (repoPath: string) => string
- ) {}
-
- async createRelayRepo(
- sourceRepoPath: string,
- relayRepoPath: string,
- branch?: string
- ): Promise {
- // Create the relay repo directory
- await fs.mkdir(relayRepoPath, { recursive: true });
-
- // Copy the .git directory from source to relay repo
- const sourceGitDir = path.join(sourceRepoPath, '.git');
- const relayGitDir = path.join(relayRepoPath, '.git');
-
- // Use recursive copy for .git directory
- await fs.cp(sourceGitDir, relayGitDir, { recursive: true, force: true });
-
- // Use provided branch or get the default branch from source repo
- const defaultBranch = branch || this.getDefaultBranch(sourceRepoPath);
-
- // Checkout the default branch in the relay repo
- this.execCommand(`git checkout ${defaultBranch}`, relayRepoPath);
-
- // Reset the working tree to restore files from the git history
- this.execCommand('git reset --hard HEAD', relayRepoPath);
- this.execCommand('git clean -fd', relayRepoPath);
-
- // Remove upstream remote URL to prevent accidental pushes to the original repository
- try {
- this.execCommand('git remote remove origin', relayRepoPath);
- } catch {
- // Origin remote may not exist, continue silently
- }
- }
-
- async syncRelayToSource(
- relayRepoPath: string,
- sourceRepoPath: string,
- projectName: string
- ): Promise {
- // Get the default branch from source repo
- const defaultBranch = this.getDefaultBranch(sourceRepoPath);
-
- // Switch to or create the relay branch
- const relayBranch = `relay-${projectName}`;
- try {
- // Try to checkout the relay branch
- this.execCommand(`git checkout ${relayBranch}`, sourceRepoPath);
- } catch {
- // Branch doesn't exist, create it from default branch
- this.execCommand(`git checkout -b ${relayBranch} ${defaultBranch}`, sourceRepoPath);
- }
-
- // Copy all files from relay repo to source repo (excluding .git directory)
- const relayFiles = await fs.readdir(relayRepoPath);
- for (const file of relayFiles) {
- if (file !== '.git') {
- const srcPath = path.join(relayRepoPath, file);
- const destPath = path.join(sourceRepoPath, file);
- const srcStat = await fs.stat(srcPath);
- if (srcStat.isDirectory()) {
- await fs.cp(srcPath, destPath, { recursive: true, force: true });
- } else {
- await fs.copyFile(srcPath, destPath);
- }
- }
- }
-
- // Stage all changes
- this.execCommand('git add -A', sourceRepoPath);
-
- // Check if there are changes to commit
- const status = this.execCommand('git status --porcelain', sourceRepoPath).trim();
- if (status.length > 0) {
- const commitMessage = `GitVibe sync from relay repo: ${new Date().toISOString()}`;
- this.execCommand(`git commit -m "${commitMessage}"`, sourceRepoPath);
- // Return the commit SHA
- return this.execCommand('git rev-parse HEAD', sourceRepoPath).trim();
- }
-
- // No changes, return null
- return null;
- }
-}
diff --git a/backend/src/services/ModelsCache.ts b/backend/src/services/ModelsCache.ts
index 3c68049..da904a8 100644
--- a/backend/src/services/ModelsCache.ts
+++ b/backend/src/services/ModelsCache.ts
@@ -1,4 +1,6 @@
-import type { AgentModel } from './AgentAdapter.js';
+import type { AgentModel } from './agent/AgentAdapter.js';
+import { openCodeAgentAdapter } from './agent/OpenCodeAgentAdapter.js';
+import { claudeCodeAgentAdapter } from './agent/ClaudeCodeAgentAdapter.js';
export type AgentKey = 'opencode' | 'claudecode';
@@ -28,10 +30,8 @@ class ModelsCache {
try {
let adapter;
if (agent === 'opencode') {
- const { openCodeAgentAdapter } = await import('./OpenCodeAgentAdapter.js');
adapter = openCodeAgentAdapter;
} else if (agent === 'claudecode') {
- const { claudeCodeAgentAdapter } = await import('./ClaudeCodeAgentAdapter.js');
adapter = claudeCodeAgentAdapter;
} else {
throw new Error(`Unknown agent: ${agent}`);
diff --git a/backend/src/services/OpsDispatcher.ts b/backend/src/services/OpsDispatcher.ts
new file mode 100644
index 0000000..8fb8500
--- /dev/null
+++ b/backend/src/services/OpsDispatcher.ts
@@ -0,0 +1,856 @@
+/**
+ * OpsDispatcher - Dispatcher for Op resources (system-external actions/resources)
+ *
+ * Op Resources:
+ * - Worktree: checkout/init; completes synchronously via callback
+ * - AgentRun: completes via callback when run finishes (callback stored, invoked from AgentService.finalizeAgentRun)
+ * - GitOps: commit/push/merge primitives
+ * - CommandExec: run commands
+ *
+ * Op resource semantics:
+ * - resource.result.status == succeeded means the external operation completed successfully
+ * - Not merely "started"
+ * - Often long-running, asynchronous
+ */
+
+import type { WorkItem, NodeRun } from '../types/models';
+import type { ResourceType } from 'git-vibe-shared';
+import {
+ RESOURCE_STATUS_SUCCEEDED,
+ RESOURCE_STATUS_FAILED,
+ RESOURCE_STATUS_CANCELED,
+} from 'git-vibe-shared';
+import { workItemsRepository } from '../repositories/WorkItemsRepository';
+import { agentRunsRepository } from '../repositories/AgentRunsRepository';
+import { projectsRepository } from '../repositories/ProjectsRepository';
+import { tasksRepository } from '../repositories/TasksRepository';
+import { worktreesRepository } from '../repositories/WorktreesRepository';
+import { gitOpsRepository } from '../repositories/GitOpsRepository';
+import { workspaceService } from './WorkspaceService';
+import { agentService } from './agent/AgentService';
+import { gitService } from './git/GitService.js';
+import { getDb } from '../db/client.js';
+import { commandExecs } from '../models/schema.js';
+import { eq } from 'drizzle-orm';
+import { exec } from 'child_process';
+import { promisify } from 'util';
+import crypto from 'node:crypto';
+import { promises as fs } from 'node:fs';
+import path from 'node:path';
+import { STORAGE_CONFIG } from '../config/storage.js';
+import type { CompleteFn } from './ResourceDispatcher.js';
+
+/**
+ * Registry to store completion callbacks for async Op resources (like AgentRun)
+ * Key: agentRunId, Value: completion callback
+ */
+const agentRunCompletionCallbacks = new Map();
+
+/**
+ * Store completion callback for an AgentRun
+ * Called by AgentRunResourceHandler when starting an agent run
+ */
+export function storeAgentRunCompletionCallback(agentRunId: string, complete: CompleteFn): void {
+ agentRunCompletionCallbacks.set(agentRunId, complete);
+}
+
+/**
+ * Get and remove completion callback for an AgentRun
+ * Called by AgentService.finalizeAgentRun() when the agent completes
+ */
+export function getAndRemoveAgentRunCompletionCallback(agentRunId: string): CompleteFn | undefined {
+ const callback = agentRunCompletionCallbacks.get(agentRunId);
+ agentRunCompletionCallbacks.delete(agentRunId);
+ return callback;
+}
+
+export interface ResourceResult {
+ resourceType: ResourceType;
+ resourceId: string;
+ status:
+ | typeof RESOURCE_STATUS_SUCCEEDED
+ | typeof RESOURCE_STATUS_FAILED
+ | typeof RESOURCE_STATUS_CANCELED;
+ summary: string;
+ outputs: Record;
+}
+
+export interface ResourceHandlerContext {
+ workItem: WorkItem;
+ nodeRun: NodeRun;
+ input: Record;
+ /** Set by ResourceDispatcher for async Op resources (e.g. AgentRun) to complete when run finishes */
+ complete?: CompleteFn;
+}
+
+export interface ResourceHandler {
+ canHandle(resourceType: ResourceType): boolean;
+ execute(context: ResourceHandlerContext): Promise;
+}
+
+class WorktreeResourceHandler implements ResourceHandler {
+ canHandle(resourceType: ResourceType): boolean {
+ return resourceType === 'Worktree';
+ }
+
+ async execute(context: ResourceHandlerContext): Promise {
+ const { workItem, input, nodeRun } = context;
+
+ const project = await projectsRepository.findById(workItem.projectId);
+ if (!project) {
+ throw new Error(`Project ${workItem.projectId} not found`);
+ }
+
+ const idempotencyKey = input.idempotencyKey as string | undefined;
+
+ if (idempotencyKey) {
+ const existing = await worktreesRepository.findByIdempotencyKey(idempotencyKey);
+ if (existing) {
+ return {
+ resourceType: 'Worktree',
+ resourceId: existing.id,
+ status:
+ existing.status === 'succeeded' ? RESOURCE_STATUS_SUCCEEDED : RESOURCE_STATUS_FAILED,
+ summary: `Worktree ${existing.id} already exists (idempotent)`,
+ outputs: {
+ path: existing.path,
+ branch: existing.branch,
+ repoSha: existing.repoSha,
+ },
+ };
+ }
+ }
+
+ if (input.removeWorktree === true) {
+ const existingWorktree = await worktreesRepository.findByWorkItemId(workItem.id);
+
+ if (existingWorktree) {
+ await worktreesRepository.updateStatus(existingWorktree.id, 'running');
+
+ if (workItem.worktreePath) {
+ await workspaceService.removeWorktree(workItem, project);
+ }
+
+ await workItemsRepository.update(workItem.id, {
+ worktreePath: undefined,
+ headBranch: undefined,
+ baseBranch: undefined,
+ headSha: undefined,
+ baseSha: undefined,
+ workspaceStatus: 'not_initialized',
+ });
+
+ await worktreesRepository.updateStatus(existingWorktree.id, 'succeeded');
+
+ return {
+ resourceType: 'Worktree',
+ resourceId: existingWorktree.id,
+ status: RESOURCE_STATUS_SUCCEEDED,
+ summary: `Worktree removed for workitem ${workItem.id}`,
+ outputs: {
+ path: existingWorktree.path,
+ branch: existingWorktree.branch,
+ },
+ };
+ }
+
+ return {
+ resourceType: 'Worktree',
+ resourceId: workItem.id,
+ status: RESOURCE_STATUS_SUCCEEDED,
+ summary: `No worktree to remove for workitem ${workItem.id}`,
+ outputs: {},
+ };
+ }
+
+ if (input.ensureWorktree === true) {
+ const worktreeId = input.id || crypto.randomUUID();
+
+ const existingWorktree = await worktreesRepository.findByWorkItemId(workItem.id);
+ if (existingWorktree && existingWorktree.status === 'succeeded') {
+ return {
+ resourceType: 'Worktree',
+ resourceId: existingWorktree.id,
+ status: RESOURCE_STATUS_SUCCEEDED,
+ summary: `Worktree ${existingWorktree.id} already exists`,
+ outputs: {
+ path: existingWorktree.path,
+ branch: existingWorktree.branch,
+ repoSha: existingWorktree.repoSha,
+ },
+ };
+ }
+
+ const worktree = await worktreesRepository.create({
+ id: worktreeId,
+ workItemId: workItem.id,
+ path: '',
+ branch: '',
+ status: 'running',
+ idempotencyKey: idempotencyKey || null,
+ nodeRunId: nodeRun.runId,
+ });
+
+ try {
+ const workspaceState = await workspaceService.initWorkspace(workItem.id, project);
+
+ await worktreesRepository.update(worktree.id, {
+ path: workspaceState.worktreePath,
+ branch: workspaceState.headBranch,
+ repoSha: workspaceState.headSha,
+ status: 'succeeded',
+ });
+
+ await workItemsRepository.update(workItem.id, {
+ worktreePath: workspaceState.worktreePath,
+ headBranch: workspaceState.headBranch,
+ baseBranch: workspaceState.baseBranch,
+ headSha: workspaceState.headSha,
+ baseSha: workspaceState.baseSha,
+ workspaceStatus: workspaceState.workspaceStatus,
+ });
+
+ return {
+ resourceType: 'Worktree',
+ resourceId: worktree.id,
+ status: RESOURCE_STATUS_SUCCEEDED,
+ summary: `Worktree initialized for workitem ${workItem.id}`,
+ outputs: {
+ path: workspaceState.worktreePath,
+ branch: workspaceState.headBranch,
+ repoSha: workspaceState.headSha,
+ },
+ };
+ } catch (error) {
+ await worktreesRepository.updateStatus(worktree.id, 'failed');
+ throw error;
+ }
+ }
+
+ const worktreeId = input.id || crypto.randomUUID();
+ const existingWorktree = await worktreesRepository.findByWorkItemId(workItem.id);
+
+ if (existingWorktree) {
+ await worktreesRepository.update(existingWorktree.id, {
+ path: input.path || existingWorktree.path,
+ branch: input.branch || existingWorktree.branch,
+ repoSha: input.repoSha || existingWorktree.repoSha,
+ status: input.status || existingWorktree.status,
+ });
+
+ return {
+ resourceType: 'Worktree',
+ resourceId: existingWorktree.id,
+ status: RESOURCE_STATUS_SUCCEEDED,
+ summary: `Worktree ${existingWorktree.id} updated`,
+ outputs: {
+ path: existingWorktree.path,
+ branch: existingWorktree.branch,
+ repoSha: existingWorktree.repoSha,
+ },
+ };
+ }
+
+ const worktree = await worktreesRepository.create({
+ id: worktreeId,
+ workItemId: workItem.id,
+ path: input.path || workItem.worktreePath || '',
+ branch: input.branch || workItem.headBranch || '',
+ repoSha: input.repoSha || workItem.headSha || null,
+ status: 'succeeded',
+ idempotencyKey: idempotencyKey || null,
+ nodeRunId: nodeRun.runId,
+ });
+
+ return {
+ resourceType: 'Worktree',
+ resourceId: worktree.id,
+ status: RESOURCE_STATUS_SUCCEEDED,
+ summary: `Worktree ${worktree.id} created`,
+ outputs: {
+ path: worktree.path,
+ branch: worktree.branch,
+ repoSha: worktree.repoSha,
+ },
+ };
+ }
+}
+
+class AgentRunResourceHandler implements ResourceHandler {
+ canHandle(resourceType: ResourceType): boolean {
+ return resourceType === 'AgentRun';
+ }
+
+ private async resolveProperty(path: string, context: ResourceHandlerContext): Promise {
+ const { workItem, nodeRun } = context;
+ const parts = path.split('.');
+ if (parts.length === 0) return '';
+
+ const [root, ...rest] = parts;
+
+ if (root === 'workitem' || root === 'workItem') {
+ if (rest.length === 0) return '';
+ const property = rest[0];
+ switch (property) {
+ case 'id':
+ return workItem.id;
+ case 'title':
+ return workItem.title || '';
+ case 'body':
+ case 'description':
+ return workItem.body || '';
+ case 'type':
+ return workItem.type || '';
+ case 'status':
+ return workItem.status || '';
+ default:
+ return '';
+ }
+ }
+
+ if (root === 'task' && rest.length >= 1) {
+ const taskType = rest[0];
+ const previousRuns = await agentRunsRepository.findByWorkItemId(workItem.id);
+
+ const taskRun = previousRuns.find((r) => {
+ const taskData = typeof r.inputJson === 'string' ? JSON.parse(r.inputJson) : r.inputJson;
+ return taskData.taskType === taskType;
+ });
+
+ if (!taskRun) return '';
+
+ if (rest.length === 2 && (rest[1] === 'output' || rest[1] === 'log')) {
+ if (rest[1] === 'log' && taskRun.log) {
+ return taskRun.log;
+ }
+ if (rest[1] === 'output' && taskRun.inputJson) {
+ const inputData =
+ typeof taskRun.inputJson === 'string'
+ ? JSON.parse(taskRun.inputJson)
+ : taskRun.inputJson;
+ if (inputData.prompt) {
+ return inputData.prompt;
+ }
+ }
+ return '';
+ }
+
+ if (taskRun.log) {
+ return taskRun.log;
+ }
+ return '';
+ }
+
+ if (root === 'agentRun' && rest.length >= 1) {
+ const previousRuns = await agentRunsRepository.findByWorkItemId(workItem.id);
+ const completedRuns = previousRuns
+ .filter(
+ (r) => r.status === RESOURCE_STATUS_SUCCEEDED || r.status === RESOURCE_STATUS_FAILED
+ )
+ .sort((a, b) => {
+ const aTime = a.finishedAt ? new Date(a.finishedAt).getTime() : 0;
+ const bTime = b.finishedAt ? new Date(b.finishedAt).getTime() : 0;
+ return bTime - aTime;
+ });
+
+ if (completedRuns.length === 0) return '';
+ const lastRun = completedRuns[0];
+
+ if (rest[0] === 'output' || rest[0] === 'log') {
+ if (rest[0] === 'log' && lastRun.log) {
+ return lastRun.log;
+ }
+ if (rest[0] === 'output' && lastRun.inputJson) {
+ const inputData =
+ typeof lastRun.inputJson === 'string'
+ ? JSON.parse(lastRun.inputJson)
+ : lastRun.inputJson;
+ if (inputData.prompt) {
+ return inputData.prompt;
+ }
+ }
+ return '';
+ }
+
+ return lastRun.log || '';
+ }
+
+ if (root === 'nodeRun' && nodeRun && rest.length >= 1) {
+ const property = rest[0];
+ switch (property) {
+ case 'id':
+ return nodeRun.runId;
+ case 'nodeId':
+ return nodeRun.nodeId;
+ case 'status':
+ return nodeRun.status;
+ case 'input':
+ return JSON.stringify(nodeRun.input || {});
+ case 'output':
+ return JSON.stringify(nodeRun.output || {});
+ default:
+ return '';
+ }
+ }
+
+ return '';
+ }
+
+ private async parseTemplate(template: string, context: ResourceHandlerContext): Promise {
+ const placeholderRegex = /\{\{([^}]+)\}\}/g;
+ let result = template;
+
+ const uniquePlaceholders = new Map();
+ const matches = Array.from(template.matchAll(placeholderRegex));
+
+ for (const match of matches) {
+ const propertyPath = match[1].trim();
+
+ if (!uniquePlaceholders.has(propertyPath)) {
+ const value = await this.resolveProperty(propertyPath, context);
+ uniquePlaceholders.set(propertyPath, value);
+ }
+ }
+
+ for (const [propertyPath, value] of uniquePlaceholders.entries()) {
+ const placeholderPattern = new RegExp(
+ `\\{\\{${propertyPath.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}\\}\\}`,
+ 'g'
+ );
+ result = result.replace(placeholderPattern, value);
+ }
+
+ return result;
+ }
+
+ async execute(context: ResourceHandlerContext): Promise {
+ const { workItem, input, nodeRun } = context;
+
+ const project = await projectsRepository.findById(workItem.projectId);
+ if (!project) {
+ throw new Error(`Project ${workItem.projectId} not found`);
+ }
+
+ if (!workItem.worktreePath) {
+ throw new Error(`WorkItem ${workItem.id} has no worktree path`);
+ }
+
+ const taskId = input.taskId as string | undefined;
+ if (!taskId) {
+ throw new Error('taskId is required for AgentRun resource');
+ }
+
+ const task = await tasksRepository.findById(taskId);
+ if (!task) {
+ throw new Error(`Task ${taskId} not found`);
+ }
+
+ if (task.workItemId !== workItem.id) {
+ throw new Error(`Task ${taskId} does not belong to WorkItem ${workItem.id}`);
+ }
+
+ const idempotencyKey = input.idempotencyKey as string | undefined;
+ if (idempotencyKey) {
+ const existingRuns = await agentRunsRepository.findByWorkItemId(workItem.id);
+ const existing = existingRuns.find((r) => r.idempotencyKey === idempotencyKey);
+ if (existing) {
+ const terminalStatus =
+ existing.status === 'succeeded'
+ ? RESOURCE_STATUS_SUCCEEDED
+ : existing.status === 'failed'
+ ? RESOURCE_STATUS_FAILED
+ : existing.status === 'cancelled'
+ ? RESOURCE_STATUS_CANCELED
+ : null;
+ const status: ResourceResult['status'] = terminalStatus ?? RESOURCE_STATUS_SUCCEEDED;
+ const result: ResourceResult = {
+ resourceType: 'AgentRun',
+ resourceId: existing.id,
+ status,
+ summary: `AgentRun ${existing.id} already exists (idempotent)`,
+ outputs: {
+ agentRunId: existing.id,
+ sessionId: existing.sessionId,
+ },
+ };
+ if (terminalStatus && context.complete) {
+ await context.complete({
+ resourceType: 'AgentRun',
+ resourceId: existing.id,
+ status: terminalStatus,
+ summary: result.summary,
+ outputs: result.outputs,
+ });
+ }
+ return result;
+ }
+ }
+
+ let prompt: string;
+
+ if (input.prompt) {
+ prompt = input.prompt as string;
+ } else if (input.template) {
+ prompt = await this.parseTemplate(input.template as string, context);
+ } else {
+ prompt = workItem.title;
+ }
+
+ const agentParams = {
+ agentType: input.agentKey || input.agentType || 'opencode',
+ model: input.model,
+ temperature: input.temperature,
+ maxTokens: input.maxTokens,
+ };
+
+ // Session ID comes from trigger input (resolved from event/context), not from a reuse flag
+ const sessionId =
+ typeof input.sessionId === 'string' && input.sessionId.trim() !== ''
+ ? input.sessionId.trim()
+ : undefined;
+
+ const agentRun = await agentService.startAgentRun(
+ workItem.id,
+ project,
+ workItem.worktreePath,
+ prompt,
+ agentParams,
+ {
+ sessionId,
+ linkedAgentRunId: input.linkedAgentRunId,
+ taskId: taskId,
+ idempotencyKey: idempotencyKey,
+ nodeRunId: nodeRun.runId,
+ }
+ );
+
+ await tasksRepository.update(taskId, {
+ currentAgentRunId: agentRun.id,
+ });
+
+ if (context.complete) {
+ storeAgentRunCompletionCallback(agentRun.id, context.complete);
+ }
+
+ return {
+ resourceType: 'AgentRun',
+ resourceId: agentRun.id,
+ status: RESOURCE_STATUS_SUCCEEDED,
+ summary: `AgentRun ${agentRun.id} started for Task ${taskId}`,
+ outputs: {
+ agentRunId: agentRun.id,
+ taskId: taskId,
+ sessionId: agentRun.sessionId,
+ },
+ };
+ }
+}
+
+class GitOpsResourceHandler implements ResourceHandler {
+ canHandle(resourceType: ResourceType): boolean {
+ return resourceType === 'GitOps';
+ }
+
+ async execute(context: ResourceHandlerContext): Promise {
+ const { workItem, input, nodeRun } = context;
+
+ const project = await projectsRepository.findById(workItem.projectId);
+ if (!project) {
+ throw new Error(`Project ${workItem.projectId} not found`);
+ }
+
+ if (!workItem.worktreePath) {
+ throw new Error(`WorkItem ${workItem.id} has no worktree path`);
+ }
+
+ const operation = input.operation as string;
+ if (!operation) {
+ throw new Error('GitOps operation is required');
+ }
+
+ const idempotencyKey = input.idempotencyKey as string | undefined;
+ const gitOpId = input.id || crypto.randomUUID();
+
+ if (idempotencyKey) {
+ const existing = await gitOpsRepository.findByIdempotencyKey(idempotencyKey);
+ if (existing) {
+ return {
+ resourceType: 'GitOps',
+ resourceId: existing.id,
+ status:
+ existing.status === 'succeeded' ? RESOURCE_STATUS_SUCCEEDED : RESOURCE_STATUS_FAILED,
+ summary: `GitOps ${existing.id} already exists (idempotent)`,
+ outputs: existing.output as Record,
+ };
+ }
+ }
+
+ const gitOp = await gitOpsRepository.create({
+ id: gitOpId,
+ workItemId: workItem.id,
+ operation: operation,
+ status: 'running',
+ input: input,
+ output: {},
+ idempotencyKey: idempotencyKey || null,
+ nodeRunId: nodeRun.runId,
+ });
+
+ try {
+ let commitSha: string | undefined = undefined;
+ let applied = false;
+
+ switch (operation) {
+ case 'commit':
+ if (input.message && workItem.worktreePath) {
+ commitSha = gitService.commitChanges(workItem.worktreePath, input.message as string);
+ applied = true;
+ }
+ break;
+ case 'push':
+ if (workItem.headBranch) {
+ applied = false;
+ }
+ break;
+ case 'merge':
+ if (input.baseBranch && workItem.worktreePath) {
+ gitService.mergeBranch(
+ workItem.worktreePath,
+ input.baseBranch as string,
+ `Merge ${input.baseBranch} into ${workItem.headBranch || 'current branch'}`
+ );
+ commitSha = gitService.getHeadSha(workItem.worktreePath);
+ applied = true;
+ }
+ break;
+ default:
+ throw new Error(`Unknown GitOps operation: ${operation}`);
+ }
+
+ await gitOpsRepository.update(gitOp.id, {
+ status: applied ? 'succeeded' : 'failed',
+ output: {
+ applied,
+ commitSha,
+ operation,
+ },
+ });
+
+ return {
+ resourceType: 'GitOps',
+ resourceId: gitOp.id,
+ status: applied ? RESOURCE_STATUS_SUCCEEDED : RESOURCE_STATUS_FAILED,
+ summary: `Git operation ${operation} ${applied ? 'completed' : 'failed'} for workitem ${workItem.id}`,
+ outputs: {
+ applied,
+ commitSha,
+ operation,
+ },
+ };
+ } catch (error: any) {
+ await gitOpsRepository.update(gitOp.id, {
+ status: 'failed',
+ output: {
+ error: error.message || 'Unknown error',
+ operation,
+ },
+ });
+
+ return {
+ resourceType: 'GitOps',
+ resourceId: gitOp.id,
+ status: RESOURCE_STATUS_FAILED,
+ summary: `Git operation ${operation} failed: ${error.message || 'Unknown error'}`,
+ outputs: {
+ applied: false,
+ error: error.message || 'Unknown error',
+ operation,
+ },
+ };
+ }
+ }
+}
+
+class CommandExecResourceHandler implements ResourceHandler {
+ canHandle(resourceType: ResourceType): boolean {
+ return resourceType === 'CommandExec';
+ }
+
+ async execute(context: ResourceHandlerContext): Promise {
+ const { workItem, nodeRun, input } = context;
+
+ const db = await getDb();
+
+ const commandExecId = input.id || crypto.randomUUID();
+ const idempotencyKey = input.idempotencyKey as string | undefined;
+
+ if (idempotencyKey) {
+ const existing = await db
+ .select()
+ .from(commandExecs)
+ .where(eq(commandExecs.idempotencyKey, idempotencyKey))
+ .limit(1)
+ .execute();
+ if (existing.length > 0) {
+ const existingExec = existing[0];
+ return {
+ resourceType: 'CommandExec',
+ resourceId: existingExec.id,
+ status:
+ existingExec.status === 'succeeded'
+ ? RESOURCE_STATUS_SUCCEEDED
+ : RESOURCE_STATUS_FAILED,
+ summary: `CommandExec ${existingExec.id} already exists (idempotent)`,
+ outputs: {
+ exitCode: existingExec.exitCode || 0,
+ stdoutPath: existingExec.stdoutPath || '',
+ stderrPath: existingExec.stderrPath || '',
+ logPath: existingExec.logPath || '',
+ },
+ };
+ }
+ }
+
+ const logsDir = STORAGE_CONFIG.logsDir;
+ await fs.mkdir(logsDir, { recursive: true });
+
+ const logPath = path.join(logsDir, `command-exec-${commandExecId}.log`);
+ const stdoutPath = path.join(logsDir, `command-exec-${commandExecId}-stdout.log`);
+ const stderrPath = path.join(logsDir, `command-exec-${commandExecId}-stderr.log`);
+
+ await db.insert(commandExecs).values({
+ id: commandExecId,
+ workItemId: workItem.id,
+ nodeRunId: nodeRun.runId,
+ command: input.command || input.steps?.[0]?.run || '',
+ status: 'running',
+ idempotencyKey: idempotencyKey || null,
+ logPath,
+ stdoutPath,
+ stderrPath,
+ startedAt: new Date(),
+ });
+
+ const execAsync = promisify(exec);
+
+ const workingDirectory = input.workingDirectoryRef
+ ? workItem.worktreePath || input.workingDirectoryRef
+ : workItem.worktreePath;
+
+ try {
+ const { stdout, stderr } = await execAsync(input.command || input.steps?.[0]?.run || '', {
+ cwd: workingDirectory,
+ env: { ...process.env, ...input.env },
+ shell: input.shell || 'bash',
+ });
+
+ const exitCode = 0;
+
+ await Promise.all([
+ fs.writeFile(stdoutPath, stdout || '', 'utf-8'),
+ fs.writeFile(stderrPath, stderr || '', 'utf-8'),
+ fs.writeFile(
+ logPath,
+ `Command: ${input.command || input.steps?.[0]?.run || ''}\n\nSTDOUT:\n${stdout || ''}\n\nSTDERR:\n${stderr || ''}\n`,
+ 'utf-8'
+ ),
+ ]);
+
+ await db
+ .update(commandExecs)
+ .set({
+ status: RESOURCE_STATUS_SUCCEEDED,
+ exitCode,
+ completedAt: new Date(),
+ })
+ .where(eq(commandExecs.id, commandExecId));
+
+ return {
+ resourceType: 'CommandExec',
+ resourceId: commandExecId,
+ status: RESOURCE_STATUS_SUCCEEDED,
+ summary: `Command completed successfully`,
+ outputs: {
+ exitCode,
+ stdoutPath,
+ stderrPath,
+ logPath,
+ },
+ };
+ } catch (error: any) {
+ const exitCode = error.code || 1;
+ const errorStderr = error.stderr || error.message || '';
+ const errorStdout = error.stdout || '';
+
+ await Promise.all([
+ fs.writeFile(stdoutPath, errorStdout, 'utf-8').catch(() => {}),
+ fs.writeFile(stderrPath, errorStderr, 'utf-8').catch(() => {}),
+ fs
+ .writeFile(
+ logPath,
+ `Command: ${input.command || input.steps?.[0]?.run || ''}\n\nSTDOUT:\n${errorStdout}\n\nSTDERR:\n${errorStderr}\n`,
+ 'utf-8'
+ )
+ .catch(() => {}),
+ ]);
+
+ await db
+ .update(commandExecs)
+ .set({
+ status: RESOURCE_STATUS_FAILED,
+ exitCode,
+ completedAt: new Date(),
+ })
+ .where(eq(commandExecs.id, commandExecId));
+
+ return {
+ resourceType: 'CommandExec',
+ resourceId: commandExecId,
+ status: 'failed',
+ summary: `Command failed: ${error.message || 'Unknown error'}`,
+ outputs: {
+ exitCode,
+ stdoutPath,
+ stderrPath,
+ logPath,
+ },
+ };
+ }
+ }
+}
+
+export class OpsDispatcher {
+ private handlers: Map;
+
+ constructor() {
+ this.handlers = new Map();
+ this.registerHandlers();
+ }
+
+ private registerHandlers(): void {
+ this.handlers.set('Worktree', new WorktreeResourceHandler());
+ this.handlers.set('AgentRun', new AgentRunResourceHandler());
+ this.handlers.set('GitOps', new GitOpsResourceHandler());
+ this.handlers.set('CommandExec', new CommandExecResourceHandler());
+ }
+
+ async call(
+ resourceType: ResourceType,
+ _input: Record,
+ context: ResourceHandlerContext
+ ): Promise {
+ const handler = this.handlers.get(resourceType);
+ if (!handler) {
+ throw new Error(`No Ops handler for resource type: ${resourceType}`);
+ }
+
+ return handler.execute(context);
+ }
+
+ canHandle(resourceType: ResourceType): boolean {
+ const opResources: ResourceType[] = ['Worktree', 'AgentRun', 'GitOps', 'CommandExec'];
+ return opResources.includes(resourceType);
+ }
+}
+
+export const opsDispatcher = new OpsDispatcher();
diff --git a/backend/src/services/PRService.ts b/backend/src/services/PRService.ts
index a631cbd..0cf3374 100644
--- a/backend/src/services/PRService.ts
+++ b/backend/src/services/PRService.ts
@@ -1,9 +1,11 @@
import { v4 as uuidv4 } from 'uuid';
import path from 'node:path';
+import { PR_STATUS_OPEN, PR_STATUS_MERGED, PR_STATUS_CLOSED } from 'git-vibe-shared';
import { workItemsRepository } from '../repositories/WorkItemsRepository.js';
import { pullRequestsRepository } from '../repositories/PullRequestsRepository.js';
import { agentRunsRepository } from '../repositories/AgentRunsRepository.js';
-import { gitService } from './GitService.js';
+import { projectsRepository } from '../repositories/ProjectsRepository.js';
+import { gitService } from './git/GitService.js';
import type { WorkItem, PullRequest, Project, AgentRun } from '../types/models.js';
/**
@@ -17,31 +19,82 @@ import type { WorkItem, PullRequest, Project, AgentRun } from '../types/models.j
*/
export class PRService {
/**
- * Open a PR for a WorkItem
+ * Open a PR for a WorkItem (stateless)
* Creates a PullRequest record if one doesn't exist
+ * Does not update WorkItem - workflow will handle state updates
+ * Checks for diffs before creating PR - returns null if no changes
*/
- async openPR(workItem: WorkItem, project: Project): Promise {
+ async openPR(
+ workItemId: string,
+ projectId: string,
+ title: string,
+ description: string | null | undefined,
+ headBranch: string,
+ baseBranch: string
+ ): Promise {
// Check if PR already exists for this WorkItem
- const existingPR = await pullRequestsRepository.findByWorkItemId(workItem.id);
+ const existingPR = await pullRequestsRepository.findByWorkItemId(workItemId);
if (existingPR) {
return existingPR;
}
- // Ensure workspace is initialized
- if (!workItem.worktreePath || !workItem.headBranch || !workItem.baseBranch) {
- throw new Error(`WorkItem ${workItem.id} workspace is not initialized`);
+ // Get workItem to check for diffs
+ const workItem = await workItemsRepository.findById(workItemId);
+ if (!workItem) {
+ throw new Error(`WorkItem ${workItemId} not found`);
+ }
+
+ // Get project to find repo path
+ const project = await projectsRepository.findById(projectId);
+ if (!project) {
+ throw new Error(`Project ${projectId} not found`);
+ }
+
+ // Check if there are any diffs between base and head
+ const repoPath = project.relayRepoPath || project.sourceRepoPath;
+ const worktreePath = workItem.worktreePath || repoPath;
+ const hasDedicatedWorktree = worktreePath && worktreePath !== repoPath && workItem.worktreePath;
+
+ try {
+ // Get base SHA (worktree creation or branch tip)
+ const baseSha = workItem.baseSha || gitService.getRefSha(repoPath, baseBranch);
+ // Use current worktree HEAD when we have a dedicated worktree so agent commits
+ // are included even if workItem.headSha was not yet updated
+ const headSha = hasDedicatedWorktree
+ ? gitService.getHeadSha(worktreePath)
+ : workItem.headSha || gitService.getRefSha(worktreePath, headBranch);
+
+ // Check if there are any changes
+ const diff = gitService.getDiff(baseSha, headSha, repoPath);
+ if (!diff || diff.trim().length === 0) {
+ console.log(
+ `[PRService] No changes detected between ${baseSha} and ${headSha}, skipping PR creation`
+ );
+ return null;
+ }
+
+ // Persist current HEAD so workItem stays in sync for getDiff/getCommits etc.
+ if (hasDedicatedWorktree && headSha !== workItem.headSha) {
+ await workItemsRepository.update(workItemId, { headSha });
+ }
+ } catch (error) {
+ // If we can't check diffs (e.g., branches don't exist yet), still create PR
+ console.warn(
+ `[PRService] Could not check diffs for ${workItemId}, creating PR anyway:`,
+ error instanceof Error ? error.message : String(error)
+ );
}
// Create new PR
const pr = await pullRequestsRepository.create({
id: uuidv4(),
- projectId: project.id,
- workItemId: workItem.id,
- title: workItem.title,
- description: workItem.body || undefined,
- status: 'open',
- sourceBranch: workItem.headBranch,
- targetBranch: workItem.baseBranch,
+ projectId,
+ workItemId,
+ title,
+ description: description || undefined,
+ status: PR_STATUS_OPEN,
+ sourceBranch: headBranch,
+ targetBranch: baseBranch,
mergeStrategy: 'merge',
});
@@ -87,7 +140,7 @@ export class PRService {
* Optimized to only fetch commits that belong to this workitem
*/
async getCommitsWithTasks(
- pr: PullRequest,
+ _pr: PullRequest,
workItem: WorkItem,
project: Project
): Promise<
@@ -260,7 +313,7 @@ export class PRService {
* Get PR statistics (files changed, additions, deletions)
*/
async getStatistics(
- pr: PullRequest,
+ _pr: PullRequest,
workItem: WorkItem,
project: Project
): Promise<{
@@ -291,7 +344,7 @@ export class PRService {
const reasons: string[] = [];
// Check 1: PR status must be open
- if (pr.status !== 'open') {
+ if (pr.status !== PR_STATUS_OPEN) {
reasons.push(`PR is ${pr.status}`);
return { canMerge: false, reasons };
}
@@ -396,7 +449,7 @@ export class PRService {
mergeCommitSha = gitService.getHeadSha(mergePath);
break;
- case 'rebase':
+ case 'rebase': {
// Strategy: rebase
// For rebase, source branch might also be in a worktree
const sourceBranchWorktree = gitService.findWorktreeForBranch(repoPath, pr.sourceBranch);
@@ -408,6 +461,7 @@ export class PRService {
gitService.mergeFFOnly(mergePath, pr.sourceBranch);
mergeCommitSha = gitService.getHeadSha(mergePath);
break;
+ }
default:
throw new Error(`Unknown merge strategy: ${strategy}`);
@@ -415,7 +469,7 @@ export class PRService {
// Update PR status
const updatedPR = await pullRequestsRepository.update(pr.id, {
- status: 'merged',
+ status: PR_STATUS_MERGED,
mergedAt: new Date(),
mergedBy: 'system', // Could be user ID in the future
mergeCommitSha,
@@ -433,7 +487,7 @@ export class PRService {
*/
async closePR(pr: PullRequest): Promise {
const updatedPR = await pullRequestsRepository.update(pr.id, {
- status: 'closed',
+ status: PR_STATUS_CLOSED,
});
if (!updatedPR) {
diff --git a/backend/src/services/PromptBuilder.ts b/backend/src/services/PromptBuilder.ts
deleted file mode 100644
index 1afc605..0000000
--- a/backend/src/services/PromptBuilder.ts
+++ /dev/null
@@ -1,117 +0,0 @@
-/**
- * PromptBuilder
- *
- * Centralized class for building agent prompts with consistent markdown formatting.
- * Handles different prompt types: task execution, conversation messages, and resume tasks.
- */
-
-export interface PromptParts {
- task?: string;
- description?: string;
- userMessage?: string;
- resumeWith?: string;
-}
-
-/**
- * PromptBuilder class for constructing agent prompts
- */
-export class PromptBuilder {
- /**
- * Build a prompt for task execution from work item
- */
- static buildTaskPrompt(taskTitle: string, description?: string): string {
- if (!description || !description.trim()) {
- return `## Task\n\n${taskTitle}`;
- }
- return `## Task\n\n${taskTitle}\n\n## Description\n\n${description.trim()}`;
- }
-
- /**
- * Build a prompt for conversation messages (user ↔ agent)
- */
- static buildConversationPrompt(userMessage: string): string {
- return `## User Message\n\n${userMessage.trim()}`;
- }
-
- /**
- * Build a prompt for resuming a task
- */
- static buildResumePrompt(
- originalPrompt: string,
- resumeInstructions: string,
- workItemTitle: string
- ): string {
- const parts = this.parsePrompt(originalPrompt, workItemTitle);
-
- let prompt = '';
- if (parts.task) {
- prompt += `## Task\n\n${parts.task}`;
- }
- if (parts.description) {
- prompt += prompt ? '\n\n## Description\n\n' : '';
- prompt += parts.description;
- }
- if (parts.userMessage) {
- prompt += prompt ? '\n\n## User Message\n\n' : '';
- prompt += parts.userMessage;
- }
-
- // Always add resume instructions
- prompt += prompt ? '\n\n## Resume Instructions\n\n' : '## Resume Instructions\n\n';
- prompt += resumeInstructions.trim();
-
- return prompt;
- }
-
- /**
- * Parse an existing prompt to extract its parts (markdown format only)
- */
- private static parsePrompt(originalPrompt: string, fallbackTitle: string): PromptParts {
- const parts: PromptParts = {};
-
- if (!originalPrompt || !originalPrompt.trim()) {
- parts.task = fallbackTitle;
- return parts;
- }
-
- // Parse markdown format: ## Task\n\n...\n\n## Description\n\n...\n\n## User Message\n\n...\n\n## Resume Instructions\n\n...
- const markdownTaskMatch = originalPrompt.match(
- /^##\s+Task\s*\n\n(.+?)(?:\n\n##\s+Description\s*\n\n(.+?))?(?:\n\n##\s+User\s+Message\s*\n\n(.+?))?(?:\n\n##\s+Resume\s+Instructions\s*\n\n(.+?))?$/s
- );
- if (markdownTaskMatch) {
- parts.task = markdownTaskMatch[1]?.trim();
- parts.description = markdownTaskMatch[2]?.trim();
- parts.userMessage = markdownTaskMatch[3]?.trim();
- return parts;
- }
-
- // Try to parse markdown "## User Message" format
- const userMessageMatch = originalPrompt.match(/^##\s+User\s+Message\s*\n\n(.+)$/s);
- if (userMessageMatch) {
- parts.task = fallbackTitle;
- parts.userMessage = userMessageMatch[1]?.trim();
- return parts;
- }
-
- // Fallback: treat entire prompt as description
- parts.task = fallbackTitle;
- parts.description = originalPrompt.trim();
- return parts;
- }
-
- /**
- * Extract task title from a prompt
- */
- static extractTaskTitle(prompt: string, fallbackTitle: string): string {
- const parts = this.parsePrompt(prompt, fallbackTitle);
- return parts.task || fallbackTitle;
- }
-
- /**
- * Extract description from a prompt
- */
- static extractDescription(prompt: string): string | undefined {
- const parts = this.parsePrompt(prompt, '');
- return parts.description || parts.userMessage;
- }
-}
diff --git a/backend/src/services/ResourceDispatcher.ts b/backend/src/services/ResourceDispatcher.ts
new file mode 100644
index 0000000..13347b5
--- /dev/null
+++ b/backend/src/services/ResourceDispatcher.ts
@@ -0,0 +1,231 @@
+/**
+ * ResourceDispatcher - Central dispatcher that routes to DomainDispatcher or OpsDispatcher
+ *
+ * Implements the optimized workflow design exactly as specified:
+ * - Separates Domain resources (WorkItem, Task, PullRequest) from Op resources (Worktree, AgentRun, GitOps, CommandExec)
+ * - Enforces idempotency at NodeRun and Resource levels
+ * - Resources call completion callback (NOT event bus)
+ * - Subject is always business entity (workitem), never synthetic resource_call
+ * - Call signature: call(resourceType, input, causedBy, idempotencyKey, complete) -> Promise
+ *
+ * Per optimized_workflow_design.md:
+ * - Domain Resources: state transitions (no long-running external execution implied)
+ * - Op Resources: external execution (often long-running, asynchronous)
+ * - Resources NEVER emit events - only Nodes emit events
+ */
+
+import type { WorkItem, NodeRun } from '../types/models';
+import type { ResourceType, EventCausedBy } from 'git-vibe-shared';
+import { RESOURCE_STATUS_SUCCEEDED } from 'git-vibe-shared';
+import { workItemsRepository } from '../repositories/WorkItemsRepository';
+import { pullRequestsRepository } from '../repositories/PullRequestsRepository';
+import { tasksRepository } from '../repositories/TasksRepository';
+import { getDb } from '../db/client.js';
+import { nodeRuns, workItems } from '../models/schema.js';
+import { eq, and, ne } from 'drizzle-orm';
+import { domainDispatcher, type ResourceResult } from './DomainDispatcher.js';
+import { opsDispatcher } from './OpsDispatcher.js';
+
+export interface ResourceHandlerContext {
+ workItem: WorkItem;
+ nodeRun: NodeRun;
+ input: Record;
+ complete?: CompleteFn; // Completion callback for async resources (like AgentRun)
+}
+
+/**
+ * Completion callback type for resource completion
+ * Resources call this when they finish (succeeded, failed, or canceled)
+ */
+export type CompleteFn = (outcome: ResourceOutcome) => Promise;
+
+/**
+ * Resource outcome returned to the engine via completion callback
+ */
+export interface ResourceOutcome {
+ resourceType: ResourceType;
+ resourceId: string;
+ status: 'succeeded' | 'failed' | 'canceled';
+ summary?: string;
+ outputs?: Record;
+}
+
+export class ResourceDispatcher {
+ /**
+ * Call a resource with completion callback (matches spec signature exactly)
+ * call(resourceType, input, causedBy, idempotencyKey, complete) -> Promise
+ */
+ async call(
+ resourceType: ResourceType,
+ input: Record,
+ causedBy: EventCausedBy,
+ idempotencyKey: string | undefined,
+ complete: CompleteFn
+ ): Promise {
+ if (!causedBy.workflowRunId || !causedBy.nodeRunId) {
+ throw new Error('causedBy must include workflowRunId and nodeRunId');
+ }
+
+ const db = await getDb();
+
+ const nodeRunRecord = await db
+ .select()
+ .from(nodeRuns)
+ .where(eq(nodeRuns.id, causedBy.nodeRunId))
+ .limit(1);
+
+ if (nodeRunRecord.length === 0) {
+ throw new Error(`NodeRun ${causedBy.nodeRunId} not found`);
+ }
+
+ const nodeRunData = nodeRunRecord[0];
+
+ // Store the called resource type for safety validation on completion
+ await db.update(nodeRuns).set({ resourceType }).where(eq(nodeRuns.id, causedBy.nodeRunId));
+
+ // Check for idempotency - return cached result if exists
+ if (idempotencyKey) {
+ const previousSuccess = await db
+ .select()
+ .from(nodeRuns)
+ .where(
+ and(
+ eq(nodeRuns.workflowRunId, causedBy.workflowRunId!),
+ eq(nodeRuns.nodeId, causedBy.nodeId!),
+ eq(nodeRuns.idempotencyKey, idempotencyKey),
+ ne(nodeRuns.id, nodeRunData.id),
+ eq(nodeRuns.status, 'succeeded')
+ )
+ )
+ .limit(1);
+
+ if (previousSuccess.length > 0) {
+ const output =
+ typeof previousSuccess[0].output === 'string'
+ ? JSON.parse(previousSuccess[0].output)
+ : previousSuccess[0].output;
+ // Complete with cached result via callback (not event bus)
+ await complete({
+ resourceType: previousSuccess[0].resourceType as ResourceType,
+ resourceId: output?.resourceId || previousSuccess[0].id,
+ status: RESOURCE_STATUS_SUCCEEDED,
+ summary: output?.summary || 'Cached result from previous execution',
+ outputs: output?.outputs || output || {},
+ });
+ return;
+ }
+ }
+
+ let workItemId: string;
+ const subjectKind = nodeRunData.subjectKind as string;
+
+ if (subjectKind === 'task') {
+ const task = await tasksRepository.findById(nodeRunData.subjectId);
+ if (!task) {
+ throw new Error(`Task ${nodeRunData.subjectId} not found`);
+ }
+ workItemId = task.workItemId;
+ } else if (subjectKind === 'pr_request') {
+ const pr = await pullRequestsRepository.findById(nodeRunData.subjectId);
+ if (!pr) {
+ throw new Error(`PR request ${nodeRunData.subjectId} not found`);
+ }
+ workItemId = pr.workItemId;
+ } else if (subjectKind === 'worktree') {
+ // subjectId is the worktree path (worktree.id in context is worktreePath)
+ const [workItemByPath] = await db
+ .select()
+ .from(workItems)
+ .where(eq(workItems.worktreePath, nodeRunData.subjectId))
+ .limit(1);
+ if (!workItemByPath) {
+ throw new Error(`WorkItem for worktree ${nodeRunData.subjectId} not found`);
+ }
+ workItemId = workItemByPath.id;
+ } else {
+ workItemId = nodeRunData.subjectId;
+ }
+
+ const workItem = await workItemsRepository.findById(workItemId);
+ if (!workItem) {
+ throw new Error(`WorkItem ${workItemId} not found`);
+ }
+
+ const nodeRun: NodeRun = {
+ runId: nodeRunData.id,
+ workflowRunId: nodeRunData.workflowRunId,
+ nodeId: nodeRunData.nodeId,
+ resourceType: nodeRunData.resourceType as ResourceType,
+ subjectKind: nodeRunData.subjectKind as any,
+ subjectId: nodeRunData.subjectId,
+ subjectVersionAtStart: nodeRunData.subjectVersionAtStart,
+ status: nodeRunData.status as any,
+ attempt: nodeRunData.attempt,
+ idempotencyKey: nodeRunData.idempotencyKey || undefined,
+ input:
+ typeof nodeRunData.input === 'string' ? JSON.parse(nodeRunData.input) : nodeRunData.input,
+ output:
+ typeof nodeRunData.output === 'string'
+ ? JSON.parse(nodeRunData.output)
+ : nodeRunData.output,
+ startedAt: nodeRunData.startedAt?.toISOString(),
+ finishedAt: nodeRunData.finishedAt?.toISOString(),
+ };
+
+ const context: ResourceHandlerContext = {
+ workItem,
+ nodeRun,
+ input,
+ complete, // Pass completion callback to handlers for async resources
+ };
+
+ let result: ResourceResult;
+
+ try {
+ if (domainDispatcher.canHandle(resourceType)) {
+ result = await domainDispatcher.call(resourceType, input, context);
+ // Domain resources complete synchronously
+ await complete({
+ resourceType: result.resourceType,
+ resourceId: result.resourceId,
+ status: result.status,
+ summary: result.summary,
+ outputs: result.outputs,
+ });
+ } else if (opsDispatcher.canHandle(resourceType)) {
+ result = await opsDispatcher.call(resourceType, input, context);
+ // Op resources: AgentRun completes asynchronously (callback stored in handler)
+ // Other Op resources complete synchronously
+ if (resourceType !== 'AgentRun') {
+ await complete({
+ resourceType: result.resourceType,
+ resourceId: result.resourceId,
+ status: result.status,
+ summary: result.summary,
+ outputs: result.outputs,
+ });
+ }
+ // For AgentRun, the completion callback is stored in AgentRunResourceHandler
+ // and will be called by AgentService.finalizeAgentRun() when the agent completes
+ } else {
+ throw new Error(`No handler for resource type: ${resourceType}`);
+ }
+ } catch (error) {
+ // Complete with failed outcome on error
+ console.error(`[ResourceDispatcher] Resource call failed for NodeRun ${causedBy.nodeRunId}`, {
+ resourceType,
+ error: error instanceof Error ? error.message : String(error),
+ causedBy,
+ });
+ await complete({
+ resourceType,
+ resourceId: nodeRunData.id,
+ status: 'failed',
+ summary: error instanceof Error ? error.message : 'Unknown error',
+ outputs: {},
+ });
+ }
+ }
+}
+
+export const resourceDispatcher = new ResourceDispatcher();
diff --git a/backend/src/services/WorkItemEventService.ts b/backend/src/services/WorkItemEventService.ts
new file mode 100644
index 0000000..416e674
--- /dev/null
+++ b/backend/src/services/WorkItemEventService.ts
@@ -0,0 +1,169 @@
+/**
+ * WorkItemEventService - Wraps WorkItem operations with event emission
+ * Ensures all WorkItem state changes emit events for workflow orchestration.
+ * Updated to use uniform event envelope format and outbox pattern per optimized design.
+ */
+
+import { WORKITEM_STATUS_CLOSED } from 'git-vibe-shared';
+import { workflowEventBus } from './workflow/WorkflowEventBus.js';
+import { eventOutboxService } from './EventOutbox.js';
+import { workItemsRepository } from '../repositories/WorkItemsRepository.js';
+import type { WorkItem } from '../types/models.js';
+
+export class WorkItemEventService {
+ /**
+ * Create a WorkItem and emit workitem.created (canonical action: create work item)
+ */
+ async createWorkItem(data: {
+ id: string;
+ projectId: string;
+ type: 'issue' | 'feature-request';
+ title: string;
+ body?: string;
+ }): Promise {
+ // Create workitem
+ const workItem = await workItemsRepository.create(data);
+
+ // Add event to outbox (should be in same transaction in production)
+ // For now, add after creation (outbox will ensure delivery)
+ const event = workflowEventBus.createEvent(
+ 'workitem.created',
+ { kind: 'workitem', id: workItem.id },
+ {
+ projectId: workItem.projectId,
+ type: workItem.type,
+ title: workItem.title,
+ body: workItem.body,
+ },
+ {
+ resourceVersion: 1,
+ }
+ );
+
+ await eventOutboxService.addEvent(event);
+
+ return workItem;
+ }
+
+ /**
+ * Update WorkItem metadata/status and emit workitem.updated, workitem.status.changed, workitem.closed
+ * (canonical action: update work item)
+ */
+ async updateWorkItem(
+ id: string,
+ data: {
+ title?: string;
+ body?: string;
+ status?: 'open' | 'closed';
+ }
+ ): Promise {
+ const existing = await workItemsRepository.findById(id);
+ if (!existing) {
+ return undefined;
+ }
+
+ const updated = await workItemsRepository.update(id, data);
+ if (!updated) {
+ return undefined;
+ }
+
+ const resourceVersion = (existing as any).version || 1;
+
+ // Add events to outbox (should be in same transaction in production)
+ if (data.title !== undefined || data.body !== undefined) {
+ const event = workflowEventBus.createEvent(
+ 'workitem.updated',
+ { kind: 'workitem', id },
+ {
+ title: updated.title,
+ body: updated.body ?? '',
+ },
+ {
+ resourceVersion: resourceVersion + 1,
+ }
+ );
+ await eventOutboxService.addEvent(event);
+ }
+
+ if (data.status !== undefined && data.status !== existing.status) {
+ const event = workflowEventBus.createEvent(
+ 'workitem.status.changed',
+ { kind: 'workitem', id },
+ {
+ oldStatus: existing.status,
+ newStatus: data.status,
+ },
+ {
+ resourceVersion: resourceVersion + 1,
+ }
+ );
+ await eventOutboxService.addEvent(event);
+ }
+
+ if (data.status === WORKITEM_STATUS_CLOSED && existing.status !== WORKITEM_STATUS_CLOSED) {
+ const event = workflowEventBus.createEvent(
+ 'workitem.closed',
+ { kind: 'workitem', id },
+ {},
+ {
+ resourceVersion: resourceVersion + 1,
+ }
+ );
+ await eventOutboxService.addEvent(event);
+ }
+
+ return updated;
+ }
+
+ /**
+ * Update WorkItem state managed by workflow (workspace fields).
+ * Emits workitem.workspace.ready when status becomes ready.
+ * Canonical action: update work item state (workspace).
+ */
+ async updateWorkItemState(
+ id: string,
+ data: {
+ workspaceStatus?: WorkItem['workspaceStatus'];
+ worktreePath?: string;
+ headBranch?: string;
+ baseBranch?: string;
+ baseSha?: string;
+ headSha?: string;
+ }
+ ): Promise {
+ const existing = await workItemsRepository.findById(id);
+ if (!existing) {
+ return undefined;
+ }
+
+ const updated = await workItemsRepository.update(id, data);
+ if (!updated) {
+ return undefined;
+ }
+
+ const resourceVersion = (existing as any).version || 1;
+
+ // Add event to outbox (should be in same transaction in production)
+ if (
+ data.workspaceStatus !== undefined &&
+ data.workspaceStatus !== existing.workspaceStatus &&
+ data.workspaceStatus === 'ready'
+ ) {
+ const worktreePath = updated.worktreePath ?? '';
+ const headBranch = updated.headBranch ?? '';
+ const event = workflowEventBus.createEvent(
+ 'workitem.workspace.ready',
+ { kind: 'workitem', id },
+ { worktreePath, headBranch },
+ {
+ resourceVersion: resourceVersion + 1,
+ }
+ );
+ await eventOutboxService.addEvent(event);
+ }
+
+ return updated;
+ }
+}
+
+export const workItemEventService = new WorkItemEventService();
diff --git a/backend/src/services/WorkspaceService.ts b/backend/src/services/WorkspaceService.ts
index d5bf187..2b9b913 100644
--- a/backend/src/services/WorkspaceService.ts
+++ b/backend/src/services/WorkspaceService.ts
@@ -1,12 +1,24 @@
-import { workItemsRepository } from '../repositories/WorkItemsRepository.js';
-import { gitService } from './GitService.js';
+import { gitService } from './git/GitService.js';
import type { WorkItem, Project } from '../types/models.js';
import path from 'node:path';
import fs from 'node:fs/promises';
import { STORAGE_CONFIG } from '../config/storage.js';
+/**
+ * Workspace state returned by workspace operations
+ */
+export interface WorkspaceState {
+ worktreePath: string;
+ headBranch: string;
+ baseBranch: string;
+ baseSha: string;
+ headSha: string;
+ workspaceStatus: 'ready' | 'not_initialized' | 'error';
+}
+
/**
* WorkspaceService manages worktree initialization and maintenance for WorkItems
+ * Refactored to be stateless - returns workspace state instead of updating WorkItem directly
*
* Per PLAN.md Section 6:
* - Ensure relay repo is present and clean
@@ -14,16 +26,19 @@ import { STORAGE_CONFIG } from '../config/storage.js';
* - Resolve base SHA: git rev-parse
* - Create head branch name: head_branch = "wi/"
* - Create worktree: git worktree add -b
- * - Persist workspace fields and set workspace_status=ready
+ * - Return workspace state (workflow will update WorkItem)
*/
export class WorkspaceService {
/**
* Initialize workspace for a WorkItem
* Creates worktree and branch if they don't exist
+ * Returns workspace state - workflow will update WorkItem
*/
- async initWorkspace(workItem: WorkItem, project: Project): Promise {
+ async initWorkspace(workItemId: string, project: Project): Promise {
const repoPath = project.relayRepoPath || project.sourceRepoPath;
- const baseBranch = project.defaultBranch;
+ // Per git_sync_flow_design: when using relay repo, worktrees branch from relay (integration branch)
+ // so PR merge targets relay; manual sync then pushes relay → mirror → source
+ const baseBranch = project.relayRepoPath ? 'relay' : project.defaultBranch;
// Step 1: Ensure relay repo is present and clean
await gitService.validateRepo(repoPath);
@@ -35,10 +50,10 @@ export class WorkspaceService {
const baseSha = gitService.getRefSha(repoPath, baseBranch);
// Step 4: Create head branch name
- const headBranch = `wi/${workItem.id}`;
+ const headBranch = `wi/${workItemId}`;
// Step 5: Create worktree path
- const worktreePath = path.join(STORAGE_CONFIG.worktreesDir, workItem.id);
+ const worktreePath = path.join(STORAGE_CONFIG.worktreesDir, workItemId);
// Step 6: Check if worktree already exists
const worktreeStatus = gitService.getWorktreeStatus(repoPath, worktreePath);
@@ -54,30 +69,24 @@ export class WorkspaceService {
// Worktree exists and directory is present, refresh head SHA
const headSha = gitService.getWorktreeHead(worktreePath);
- // Update WorkItem with current state
- const updated = await workItemsRepository.update(workItem.id, {
+ // Return workspace state - workflow will update WorkItem
+ return {
worktreePath,
headBranch,
baseBranch,
baseSha,
headSha,
workspaceStatus: 'ready',
- });
-
- if (!updated) {
- throw new Error(`Failed to update WorkItem ${workItem.id}`);
- }
-
- return updated;
- } else {
- // Worktree is registered but directory is missing, prune stale worktree
- try {
- gitService.pruneWorktrees(repoPath);
- } catch (error) {
- console.warn(
- `Warning when pruning worktrees: ${error instanceof Error ? error.message : String(error)}`
- );
- }
+ };
+ }
+ } else {
+ // Worktree is registered but directory is missing, prune stale worktree
+ try {
+ gitService.pruneWorktrees(repoPath);
+ } catch (error) {
+ console.warn(
+ `Warning when pruning worktrees: ${error instanceof Error ? error.message : String(error)}`
+ );
}
}
@@ -87,6 +96,7 @@ export class WorkspaceService {
.access(worktreePath)
.then(() => true)
.catch(() => false);
+
if (dirExists) {
// Directory exists but is not a valid worktree, remove it
await fs.rm(worktreePath, { recursive: true, force: true });
@@ -114,74 +124,99 @@ export class WorkspaceService {
// Step 9: Get initial head SHA (same as baseSha initially)
const headSha = gitService.getWorktreeHead(worktreePath);
- // Step 10: Persist workspace fields
- const updated = await workItemsRepository.update(workItem.id, {
+ // Step 10: Return workspace state - workflow will update WorkItem
+ return {
worktreePath,
headBranch,
baseBranch,
baseSha,
headSha,
workspaceStatus: 'ready',
- });
-
- if (!updated) {
- throw new Error(`Failed to update WorkItem ${workItem.id}`);
- }
+ };
+ }
- return updated;
+ /**
+ * Refresh cached head SHA for a WorkItem
+ * Returns new head SHA - workflow will update WorkItem
+ */
+ async refreshHeadSha(worktreePath: string): Promise {
+ // Get current head SHA from worktree
+ return gitService.getWorktreeHead(worktreePath);
}
/**
- * Ensure workspace exists (idempotent)
- * Returns the WorkItem with workspace initialized
+ * Get workspace state if workspace already exists
+ * Returns null if workspace doesn't exist
*/
- async ensureWorkspace(workItem: WorkItem, project: Project): Promise {
- // If workspace is already ready, just refresh head SHA
+ async getWorkspaceState(workItem: WorkItem, project: Project): Promise {
if (workItem.workspaceStatus === 'ready' && workItem.worktreePath) {
const repoPath = project.relayRepoPath || project.sourceRepoPath;
const worktreeStatus = gitService.getWorktreeStatus(repoPath, workItem.worktreePath);
if (worktreeStatus === 'present') {
// Worktree exists, refresh head SHA
- return await this.refreshHeadSha(workItem);
+ const headSha = await this.refreshHeadSha(workItem.worktreePath);
+ return {
+ worktreePath: workItem.worktreePath,
+ headBranch: workItem.headBranch || `wi/${workItem.id}`,
+ baseBranch: workItem.baseBranch || project.defaultBranch,
+ baseSha: workItem.baseSha || gitService.getRefSha(repoPath, project.defaultBranch),
+ headSha,
+ workspaceStatus: 'ready',
+ };
}
}
- // Initialize workspace
- return await this.initWorkspace(workItem, project);
+ return null;
}
/**
- * Refresh cached head SHA for a WorkItem
+ * Ensure workspace exists (idempotent)
+ * Returns updated WorkItem with workspace state
*/
- async refreshHeadSha(workItem: WorkItem): Promise {
- if (!workItem.worktreePath) {
- throw new Error(`WorkItem ${workItem.id} has no worktree path`);
+ async ensureWorkspace(workItem: WorkItem, project: Project): Promise {
+ // Check if workspace already exists
+ const existingState = await this.getWorkspaceState(workItem, project);
+ if (existingState) {
+ return {
+ ...workItem,
+ worktreePath: existingState.worktreePath,
+ headBranch: existingState.headBranch,
+ baseBranch: existingState.baseBranch,
+ baseSha: existingState.baseSha,
+ headSha: existingState.headSha,
+ workspaceStatus: existingState.workspaceStatus,
+ };
}
- // Get current head SHA from worktree
- const headSha = gitService.getWorktreeHead(workItem.worktreePath);
-
- // Update WorkItem with new head SHA
- const updated = await workItemsRepository.update(workItem.id, {
- headSha,
- });
-
- if (!updated) {
- throw new Error(`Failed to update WorkItem ${workItem.id}`);
+ // Initialize workspace
+ const workspaceState = await this.initWorkspace(workItem.id, project);
+ if (workspaceState) {
+ // Return updated WorkItem with workspace state
+ return {
+ ...workItem,
+ worktreePath: workspaceState.worktreePath,
+ headBranch: workspaceState.headBranch,
+ baseBranch: workspaceState.baseBranch,
+ baseSha: workspaceState.baseSha,
+ headSha: workspaceState.headSha,
+ workspaceStatus: workspaceState.workspaceStatus,
+ };
}
-
- return updated;
+ return workItem;
}
/**
* Remove worktree for a WorkItem
- * Does not delete the branch, only removes the worktree
+ * Does not delete branch, only removes worktree
+ * Returns updated workspace state (workflow will persist)
*/
- async removeWorktree(workItem: WorkItem, project: Project): Promise {
+ async removeWorktree(workItem: WorkItem, project: Project): Promise> {
if (!workItem.worktreePath) {
// No worktree to remove
- return;
+ return {
+ workspaceStatus: 'not_initialized',
+ };
}
const repoPath = project.relayRepoPath || project.sourceRepoPath;
@@ -194,27 +229,27 @@ export class WorkspaceService {
gitService.removeWorktree(workItem.worktreePath, repoPath);
}
- // Update WorkItem to reflect worktree removal
- await workItemsRepository.update(workItem.id, {
- worktreePath: undefined,
+ // Return state indicating worktree removal (workflow will update WorkItem)
+ return {
+ worktreePath: '',
workspaceStatus: 'not_initialized',
- });
+ };
}
/**
* Delete both worktree and branch for a WorkItem
* Use this when permanently deleting a WorkItem
- * Does not update the WorkItem in the database (since it's being deleted)
+ * Does not update WorkItem in database (since it's being deleted)
*/
async deleteWorkspace(workItem: WorkItem, project: Project): Promise {
if (!workItem.worktreePath) {
- // No workspace to delete
+ // No worktree to delete
return;
}
const repoPath = project.relayRepoPath || project.sourceRepoPath;
- // Remove worktree directly (don't call removeWorktree as it tries to update the WorkItem)
+ // Remove worktree directly (don't call removeWorktree as it tries to update WorkItem)
try {
const worktreeStatus = gitService.getWorktreeStatus(repoPath, workItem.worktreePath);
if (worktreeStatus === 'present') {
@@ -231,6 +266,7 @@ export class WorkspaceService {
.access(workItem.worktreePath)
.then(() => true)
.catch(() => false);
+
if (dirExists) {
await fs.rm(workItem.worktreePath, { recursive: true, force: true });
}
diff --git a/backend/src/services/AgentAdapter.ts b/backend/src/services/agent/AgentAdapter.ts
similarity index 86%
rename from backend/src/services/AgentAdapter.ts
rename to backend/src/services/agent/AgentAdapter.ts
index e03632e..e5320b2 100644
--- a/backend/src/services/AgentAdapter.ts
+++ b/backend/src/services/agent/AgentAdapter.ts
@@ -3,11 +3,20 @@
* All code agent implementations should extend this class
*/
-import { spawn, execSync } from 'node:child_process';
+import { spawn, execSync, exec } from 'node:child_process';
+import { promisify } from 'node:util';
import { promises as fs } from 'node:fs';
import path from 'node:path';
-import os from 'node:os';
-import { gitService } from './GitService.js';
+import {
+ AGENT_RUN_STATUS_RUNNING,
+ AGENT_RUN_STATUS_QUEUED,
+ AGENT_RUN_STATUS_SUCCEEDED,
+ AGENT_RUN_STATUS_FAILED,
+} from 'git-vibe-shared';
+import { gitService } from '../git/GitService.js';
+import { STORAGE_CONFIG } from '../../config/storage.js';
+import { agentRunsRepository } from '../../repositories/AgentRunsRepository.js';
+import { workItemsRepository } from '../../repositories/WorkItemsRepository.js';
export type AgentModel = {
id: string;
@@ -23,6 +32,8 @@ export type AgentRunParams = {
executablePath: string;
baseArgs?: string[];
};
+ /** When set, the adapter should continue in this session (e.g. opencode run --session ) */
+ sessionId?: string | null;
};
export type AgentCorrectionParams = {
@@ -56,13 +67,13 @@ export type SessionData = Record;
export abstract class AgentAdapter {
protected activeProcesses = new Map>();
+ protected processPids = new Map(); // Track PID for each runId
protected sessionCache = new Map();
/**
* Get the logs directory path for storing agent run logs
*/
protected async getLogsDir(): Promise {
- const { STORAGE_CONFIG } = await import('../config/storage.js');
return STORAGE_CONFIG.logsDir;
}
@@ -139,14 +150,13 @@ export abstract class AgentAdapter {
if (this.activeProcesses.has(runId)) {
- return { status: 'running' };
+ return { status: AGENT_RUN_STATUS_RUNNING };
}
- const { agentRunsRepository } = await import('../repositories/AgentRunsRepository.js');
const agentRun = await agentRunsRepository.findById(runId);
if (!agentRun) {
- return { status: 'queued' };
+ return { status: AGENT_RUN_STATUS_QUEUED };
}
return { status: agentRun.status as AgentStatus };
@@ -175,18 +185,9 @@ export abstract class AgentAdapter>,
onBeforeUpdate?: () => Promise
): Promise {
- const status = exitCode === 0 ? 'succeeded' : 'failed';
+ const status = exitCode === 0 ? AGENT_RUN_STATUS_SUCCEEDED : AGENT_RUN_STATUS_FAILED;
const headShaBefore = gitService.getWorktreeHead(worktreePath);
const headShaAfter = gitService.getWorktreeHead(worktreePath);
await logFile.close();
this.activeProcesses.delete(runId);
+ this.processPids.delete(runId);
if (onBeforeUpdate) {
await onBeforeUpdate();
}
- const { agentRunsRepository } = await import('../repositories/AgentRunsRepository.js');
const logPath = await this.getLogFilePath(runId);
await agentRunsRepository.update(runId, {
@@ -409,6 +410,7 @@ export abstract class AgentAdapter>,
onBeforeUpdate?: () => Promise
): Promise {
- const status = exitCode === 0 ? 'succeeded' : 'failed';
+ const status = exitCode === 0 ? AGENT_RUN_STATUS_SUCCEEDED : AGENT_RUN_STATUS_FAILED;
const headShaBefore = gitService.getWorktreeHead(worktreePath);
const headShaAfter = gitService.getWorktreeHead(worktreePath);
await stdoutFile.close();
await stderrFile.close();
this.activeProcesses.delete(runId);
+ this.processPids.delete(runId);
if (onBeforeUpdate) {
await onBeforeUpdate();
}
- const { agentRunsRepository } = await import('../repositories/AgentRunsRepository.js');
const logPath = await this.getLogFilePath(runId);
const stdoutPath = await this.getStdoutPath(runId);
const stderrPath = await this.getStderrPath(runId);
@@ -472,6 +473,7 @@ export abstract class AgentAdapter {
+ const execPromise = promisify(exec);
+ let stdout = '';
+ let stderr = '';
+ let exitCode: number | null = null;
+
+ try {
+ const result = await execPromise(command, {
+ ...options,
+ encoding: options.encoding || 'utf-8',
+ });
+ stdout = result.stdout || '';
+ stderr = result.stderr || '';
+ exitCode = 0;
+ } catch (error: unknown) {
+ // When exec fails, the error contains stdout, stderr, and code
+ const err = error as { stdout?: string; stderr?: string; message?: string; code?: number };
+ stdout = err.stdout || '';
+ stderr = err.stderr || err.message || 'Unknown error';
+ exitCode = err.code ?? 1;
+ }
+
+ return { stdout, stderr, exitCode };
+ }
+
/**
* Cache session data for a run
*/
@@ -560,12 +596,24 @@ export abstract class AgentAdapter {
- const { agentRunsRepository } = await import('../repositories/AgentRunsRepository.js');
-
const sessionData = JSON.stringify(session);
await agentRunsRepository.update(runId, {
diff --git a/backend/src/services/agent/AgentRunRecoveryService.ts b/backend/src/services/agent/AgentRunRecoveryService.ts
new file mode 100644
index 0000000..34cee89
--- /dev/null
+++ b/backend/src/services/agent/AgentRunRecoveryService.ts
@@ -0,0 +1,170 @@
+/**
+ * AgentRunRecoveryService
+ * Recovers interrupted agent runs on service restart by checking if their PIDs are still running.
+ * If a PID doesn't exist, the WorkItem is resumed by sending "Continue" message.
+ */
+
+import { agentRunsRepository } from '../../repositories/AgentRunsRepository.js';
+import { workItemsRepository } from '../../repositories/WorkItemsRepository.js';
+import { workflowEventBus } from '../workflow/WorkflowEventBus.js';
+import { openCodeAgentAdapter } from './OpenCodeAgentAdapter.js';
+import { claudeCodeAgentAdapter } from './ClaudeCodeAgentAdapter.js';
+
+/**
+ * Check if a process with the given PID is still running
+ */
+function isProcessRunning(pid: number): boolean {
+ try {
+ // On Unix-like systems, sending signal 0 to a process checks if it exists
+ // This doesn't kill the process, just checks if it's alive
+ process.kill(pid, 0);
+ return true;
+ } catch (error) {
+ // If the error is ESRCH (no such process), the process doesn't exist
+ // If it's EPERM (permission denied), the process exists but we can't signal it
+ const err = error as NodeJS.ErrnoException;
+ if (err.code === 'ESRCH') {
+ return false;
+ }
+ // For EPERM or other errors, assume the process exists (safer default)
+ return true;
+ }
+}
+
+export class AgentRunRecoveryService {
+ /**
+ * Recover interrupted agent runs on service startup
+ * For every unfinished WorkItem (status='running'), verify its PID exists.
+ * If missing, treat the process as unexpectedly terminated and resume the WorkItem.
+ */
+ async recoverInterruptedRuns(): Promise {
+ console.log('[AgentRunRecoveryService] Starting recovery of interrupted agent runs...');
+
+ try {
+ // Find all agent runs with status 'running'
+ const allWorkItems = await workItemsRepository.findAll();
+ const unfinishedRuns: Array<{ workItemId: string; agentRunId: string; pid: number | null }> =
+ [];
+
+ for (const workItem of allWorkItems) {
+ const agentRuns = await agentRunsRepository.findByWorkItemId(workItem.id);
+ for (const run of agentRuns) {
+ if (run.status === 'running') {
+ unfinishedRuns.push({
+ workItemId: workItem.id,
+ agentRunId: run.id,
+ pid: run.pid ?? null,
+ });
+ }
+ }
+ }
+
+ console.log(
+ `[AgentRunRecoveryService] Found ${unfinishedRuns.length} unfinished agent runs to check`
+ );
+
+ // Check each unfinished run
+ for (const { workItemId, agentRunId, pid } of unfinishedRuns) {
+ if (!pid) {
+ console.log(
+ `[AgentRunRecoveryService] Agent run ${agentRunId} has no PID, marking as failed`
+ );
+ await agentRunsRepository.update(agentRunId, {
+ status: 'failed',
+ finishedAt: new Date(),
+ log: 'Process terminated unexpectedly (no PID recorded)',
+ });
+ // Release lock on WorkItem
+ await workItemsRepository.releaseLock(workItemId, agentRunId);
+ continue;
+ }
+
+ // Get the agent run to determine which adapter was used
+ const agentRun = await agentRunsRepository.findById(agentRunId);
+ if (!agentRun) {
+ console.error(
+ `[AgentRunRecoveryService] Agent run ${agentRunId} not found, skipping recovery`
+ );
+ continue;
+ }
+
+ // Check if PID exists in memory cache (adapter's processPids map)
+ // Use the appropriate adapter based on agentKey
+ let pidInCache = false;
+ if (agentRun.agentKey === 'opencode') {
+ pidInCache = openCodeAgentAdapter.hasPid(agentRunId);
+ } else if (agentRun.agentKey === 'claudecode') {
+ pidInCache = claudeCodeAgentAdapter.hasPid(agentRunId);
+ }
+
+ // If PID is not in cache, check if the process is still running
+ if (!pidInCache) {
+ const processExists = isProcessRunning(pid);
+ if (!processExists) {
+ console.log(
+ `[AgentRunRecoveryService] Process ${pid} for agent run ${agentRunId} is not running, resuming WorkItem ${workItemId}`
+ );
+
+ // Mark the run as failed since the process is dead
+ await agentRunsRepository.update(agentRunId, {
+ status: 'failed',
+ finishedAt: new Date(),
+ log: `Process ${pid} terminated unexpectedly. Resuming WorkItem.`,
+ });
+ // Release lock on WorkItem before resuming
+ await workItemsRepository.releaseLock(workItemId, agentRunId);
+
+ // If the agent run has a sessionId, we can resume it
+ if (agentRun.sessionId) {
+ // Resume the WorkItem by emitting workitem.task.resume event with "Continue" message
+ console.log(
+ `[AgentRunRecoveryService] Resuming WorkItem ${workItemId} with sessionId ${agentRun.sessionId}`
+ );
+ await workflowEventBus.emit({
+ eventId: crypto.randomUUID(),
+ at: new Date().toISOString(),
+ subject: { kind: 'workitem', id: workItemId },
+ type: 'workitem.task.resume',
+ workItemId,
+ data: {
+ originalAgentRunId: agentRunId,
+ sessionId: agentRun.sessionId,
+ prompt: 'Continue',
+ title: (await workItemsRepository.findById(workItemId))?.title || '',
+ body: (await workItemsRepository.findById(workItemId))?.body || '',
+ },
+ });
+ } else {
+ console.log(
+ `[AgentRunRecoveryService] Agent run ${agentRunId} has no sessionId, cannot resume`
+ );
+ }
+ } else {
+ // Process exists but not in cache - restore it to cache
+ console.log(
+ `[AgentRunRecoveryService] Process ${pid} exists but not in cache, restoring to cache for agent run ${agentRunId}`
+ );
+ // Restore PID to the appropriate adapter's cache
+ // Note: We access the protected processPids map directly since we need to restore state
+ if (agentRun.agentKey === 'opencode') {
+ (openCodeAgentAdapter as any).processPids?.set(agentRunId, pid);
+ } else if (agentRun.agentKey === 'claudecode') {
+ (claudeCodeAgentAdapter as any).processPids?.set(agentRunId, pid);
+ }
+ }
+ } else {
+ console.log(
+ `[AgentRunRecoveryService] Agent run ${agentRunId} PID ${pid} is in cache, process is running`
+ );
+ }
+ }
+
+ console.log('[AgentRunRecoveryService] Recovery completed');
+ } catch (error) {
+ console.error('[AgentRunRecoveryService] Error during recovery:', error);
+ // Don't throw - recovery failure shouldn't prevent server startup
+ }
+ }
+}
+
+export const agentRunRecoveryService = new AgentRunRecoveryService();
diff --git a/backend/src/services/AgentService.ts b/backend/src/services/agent/AgentService.ts
similarity index 54%
rename from backend/src/services/AgentService.ts
rename to backend/src/services/agent/AgentService.ts
index 98dc148..21bfd8d 100644
--- a/backend/src/services/AgentService.ts
+++ b/backend/src/services/agent/AgentService.ts
@@ -1,15 +1,22 @@
import { v4 as uuidv4 } from 'uuid';
-import { projectsRepository } from '../repositories/ProjectsRepository.js';
-import { workItemsRepository } from '../repositories/WorkItemsRepository.js';
-import { agentRunsRepository } from '../repositories/AgentRunsRepository.js';
-import { pullRequestsRepository } from '../repositories/PullRequestsRepository.js';
-import { gitService } from '../services/GitService.js';
-import { workspaceService } from './WorkspaceService.js';
-import { prService } from './PRService.js';
+import {
+ PR_STATUS_OPEN,
+ AGENT_RUN_STATUS_RUNNING,
+ AGENT_RUN_STATUS_SUCCEEDED,
+ AGENT_RUN_STATUS_FAILED,
+} from 'git-vibe-shared';
+import { projectsRepository } from '../../repositories/ProjectsRepository.js';
+import { workItemsRepository } from '../../repositories/WorkItemsRepository.js';
+import { agentRunsRepository } from '../../repositories/AgentRunsRepository.js';
+import { pullRequestsRepository } from '../../repositories/PullRequestsRepository.js';
+import { gitService } from '../git/GitService.js';
+import { workspaceService } from '../WorkspaceService.js';
+import { prService } from '../PRService.js';
import { openCodeAgentAdapter } from './OpenCodeAgentAdapter.js';
import { claudeCodeAgentAdapter } from './ClaudeCodeAgentAdapter.js';
-import { PromptBuilder } from './PromptBuilder.js';
-import type { Project, WorkItem, AgentRun, PullRequest } from '../types/models.js';
+import { workflowEventBus } from '../workflow/WorkflowEventBus.js';
+import { getAndRemoveAgentRunCompletionCallback } from '../OpsDispatcher.js';
+import type { Project, WorkItem, AgentRun, PullRequest, Task } from '../../types/models.js';
export type AgentType = 'opencode' | 'claudecode';
@@ -52,13 +59,6 @@ export class AgentService {
]);
}
- /**
- * Get the number of currently running tasks for a project
- */
- private getRunningTaskCount(projectId: string): number {
- return this.runningTasksPerProject.get(projectId)?.size || 0;
- }
-
/**
* Check if a project can start a new task based on concurrency limit
*/
@@ -74,6 +74,13 @@ export class AgentService {
return runningCount < maxConcurrency;
}
+ /**
+ * Get count of running agent runs for a project
+ */
+ private getRunningTaskCount(projectId: string): number {
+ return this.runningTasksPerProject.get(projectId)?.size ?? 0;
+ }
+
/**
* Track a task as running for a project
*/
@@ -142,57 +149,6 @@ export class AgentService {
};
}
- /**
- * Open a PR for a WorkItem
- * Rules:
- * - Deterministic branch name: Same WorkItem → same branch name every time
- * - Stable worktree: Don't delete worktree unless WorkItem is closed/deleted
- * - Reopen support: Reuse existing worktree when reopening a WorkItem
- */
- private async openPRForWorkItem(workItem: WorkItem, project: Project): Promise {
- // Ensure workspace is initialized
- const updatedWorkItem = await workspaceService.ensureWorkspace(workItem, project);
-
- // Open PR for the WorkItem
- const pr = await prService.openPR(updatedWorkItem, project);
-
- return pr;
- }
-
- /**
- * Close existing PR if there's no diff between base and head
- * This ensures PRs with no changes are automatically closed
- */
- private async closeExistingPRIfNoDiff(workItem: WorkItem, headSha?: string): Promise {
- // Check if PR exists for this WorkItem
- const existingPR = await pullRequestsRepository.findByWorkItemId(workItem.id);
- if (!existingPR || existingPR.status !== 'open') {
- // No PR exists or PR is already closed/merged
- return;
- }
-
- // Verify there's actually no diff before closing
- if (!workItem.worktreePath || !workItem.baseSha) {
- // Can't verify diff, skip closing
- return;
- }
-
- try {
- // Use provided headSha or get current HEAD
- const currentHeadSha = headSha || gitService.getHeadSha(workItem.worktreePath);
- const diff = gitService.getDiff(workItem.baseSha, currentHeadSha, workItem.worktreePath);
- const hasActualChanges = diff.trim().length > 0;
-
- if (!hasActualChanges) {
- // No diff - close the PR
- await prService.closePR(existingPR);
- }
- } catch (error) {
- // If we can't get the diff, don't close the PR (fail safe)
- console.error(`Failed to check diff for PR ${existingPR.id}:`, error);
- }
- }
-
/**
* Clean up worktree for a closed WorkItem
* Only removes worktree when WorkItem is closed or deleted
@@ -208,22 +164,29 @@ export class AgentService {
}
/**
- * Start an agent run for a WorkItem
+ * Start an agent run for a WorkItem (stateless)
+ * Assumes workspace is already initialized - workflow handles workspace initialization
+ * Returns AgentRun and does not orchestrate workspace or PR creation
*/
- private async startAgentRun(
- workItem: WorkItem,
+ async startAgentRun(
+ workItemId: string,
project: Project,
+ worktreePath: string,
prompt: string,
agentParams: AgentParams,
options?: {
sessionId?: string;
linkedAgentRunId?: string;
+ taskId?: string;
+ idempotencyKey?: string;
+ nodeRunId?: string;
}
): Promise {
// Validate prompt
if (!prompt || typeof prompt !== 'string') {
throw new Error('Prompt is required and must be a string');
}
+
// Check concurrency limit
const canStart = await this.canStartTask(project.id);
if (!canStart) {
@@ -232,19 +195,16 @@ export class AgentService {
);
}
- // Ensure workspace is initialized
- const updatedWorkItem = await workspaceService.ensureWorkspace(workItem, project);
-
// Acquire workspace lock
const runId = uuidv4();
const lockAcquired = await workItemsRepository.acquireLock(
- updatedWorkItem.id,
+ workItemId,
runId,
3600000 * 6 // Default TTL: 6 hour in milliseconds
);
if (!lockAcquired) {
- const lockStatus = await workItemsRepository.isLocked(updatedWorkItem.id);
+ const lockStatus = await workItemsRepository.isLocked(workItemId);
throw new Error(
`WorkItem is locked by another agent run. Owner: ${lockStatus.ownerRunId}, Expires: ${lockStatus.expiresAt}`
);
@@ -258,16 +218,17 @@ export class AgentService {
// Validate agent executable
await adapter.validate({ executablePath: config.executablePath });
- // Determine session_id: WorkItem-scoped by default
- const sessionId = options?.sessionId || `wi-${updatedWorkItem.id}`;
+ // Determine session_id: Use provided sessionId or null (adapter will persist actual session)
+ // Do not create fake initial session - adapter will list and persist sessions before/during execution
+ const sessionId = options?.sessionId || null;
// Determine head SHA before run
- const headShaBefore = gitService.getHeadSha(updatedWorkItem.worktreePath || '');
+ const headShaBefore = gitService.getHeadSha(worktreePath);
// Create agent run record
const agentRun = await agentRunsRepository.create({
id: runId,
- workItemId: updatedWorkItem.id,
+ workItemId,
projectId: project.id,
agentKey: agentType,
inputSummary: prompt ? prompt.substring(0, 200) : undefined,
@@ -277,11 +238,14 @@ export class AgentService {
}),
sessionId,
linkedAgentRunId: options?.linkedAgentRunId,
+ taskId: options?.taskId || null,
+ idempotencyKey: options?.idempotencyKey || null,
+ nodeRunId: options?.nodeRunId || null,
});
// Mark as running
await agentRunsRepository.update(runId, {
- status: 'running',
+ status: AGENT_RUN_STATUS_RUNNING,
startedAt: new Date(),
headShaBefore,
});
@@ -292,32 +256,34 @@ export class AgentService {
// Execute agent asynchronously
adapter
.run({
- worktreePath: updatedWorkItem.worktreePath || '',
+ worktreePath,
agentRunId: runId,
prompt,
config,
+ sessionId: sessionId ?? undefined,
})
.catch(async (error: unknown) => {
await agentRunsRepository.update(runId, {
- status: 'failed',
+ status: AGENT_RUN_STATUS_FAILED,
log: `Failed to start agent process: ${error instanceof Error ? error.message : String(error)}`,
finishedAt: new Date(),
});
this.untrackRunningTask(project.id, runId);
// Release lock
- await workItemsRepository.releaseLock(updatedWorkItem.id, runId);
+ await workItemsRepository.releaseLock(workItemId, runId);
});
return agentRun;
} catch (error) {
// Release lock on error
- await workItemsRepository.releaseLock(updatedWorkItem.id, runId);
+ await workItemsRepository.releaseLock(workItemId, runId);
throw error;
}
}
/**
- * Execute a task: start agent automatically (PR will be created after agent finishes if there are changes)
+ * Execute a task: ensure workspace and start agent run.
+ * Used by the agent-runs route when starting a run from the API.
*/
async executeTask(
projectId: string,
@@ -342,26 +308,51 @@ export class AgentService {
// Parse agent params from project
const agentParams = this.parseAgentParams(project.agentParams);
- // Ensure workspace is initialized (needed for agent run, but don't create PR yet)
- await workspaceService.ensureWorkspace(workItem, project);
+ const workspaceState = await workspaceService.ensureWorkspace(workItem, project);
+ if (!workItem.worktreePath) {
+ await workItemsRepository.update(workItemId, {
+ ...workspaceState,
+ worktreePath: workspaceState.worktreePath ?? undefined,
+ body: workspaceState.body ?? undefined,
+ headBranch: workspaceState.headBranch ?? undefined,
+ baseBranch: workspaceState.baseBranch ?? undefined,
+ baseSha: workspaceState.baseSha ?? undefined,
+ headSha: workspaceState.headSha ?? undefined,
+ });
+ workItem.worktreePath = workspaceState.worktreePath;
+ }
// Build prompt from work item or user message
let prompt: string;
if (userMessage && userMessage.trim()) {
// For conversation messages, use markdown format
- prompt = PromptBuilder.buildConversationPrompt(userMessage);
+ prompt = `## User Message\n\n${userMessage.trim()}`;
} else {
// For regular task execution, use markdown format
const description = workItem.body ?? workItemBody ?? '';
- prompt = PromptBuilder.buildTaskPrompt(workItemTitle, description);
+ if (!description || !description.trim()) {
+ prompt = `## Task\n\n${workItemTitle}`;
+ } else {
+ prompt = `## Task\n\n${workItemTitle}\n\n## Description\n\n${description.trim()}`;
+ }
}
console.log(`[AgentService] Building prompt for work item ${workItemId}`);
console.log(`[AgentService] Title: ${workItemTitle}`);
console.log(`[AgentService] Final prompt length: ${prompt.length} characters`);
- // Start agent run
- const agentRun = await this.startAgentRun(workItem, project, prompt, agentParams);
+ // Start agent run (stateless version)
+ const worktreePath = workItem.worktreePath || workspaceState.worktreePath;
+ if (!worktreePath) {
+ throw new Error(`WorkItem ${workItemId} has no worktree path`);
+ }
+ const agentRun = await this.startAgentRun(
+ workItemId,
+ project,
+ worktreePath,
+ prompt,
+ agentParams
+ );
return {
workItem: {
@@ -369,7 +360,6 @@ export class AgentService {
title: workItemTitle,
body: workItemBody,
},
- // PR will be created in finalizeAgentRun if there are changes
agentRun,
};
} catch (error) {
@@ -386,230 +376,180 @@ export class AgentService {
/**
* Cancel a running agent task
*/
- async cancelTask(agentRunId: string): Promise {
- const agentRun = await agentRunsRepository.findById(agentRunId);
- if (!agentRun) {
- throw new Error('Agent run not found');
+ async cancelTask(taskId: string): Promise {
+ const { tasksRepository } = await import('../../repositories/TasksRepository.js');
+ const task = await tasksRepository.findById(taskId);
+
+ if (!task) {
+ throw new Error('Task not found');
+ }
+
+ // Cancel the task's current agent run if running
+ if (task.currentAgentRunId) {
+ const agentRun = await agentRunsRepository.findById(task.currentAgentRunId);
+ if (agentRun && agentRun.status === AGENT_RUN_STATUS_RUNNING) {
+ const adapter = this.getAgentAdapter(agentRun.agentKey as AgentType);
+ await adapter.cancel(agentRun.id);
+ await agentRunsRepository.update(agentRun.id, {
+ status: 'cancelled',
+ finishedAt: new Date(),
+ });
+ const workItem = await workItemsRepository.findById(task.workItemId);
+ if (workItem) {
+ this.untrackRunningTask(workItem.projectId, agentRun.id);
+ await workItemsRepository.releaseLock(workItem.id, agentRun.id);
+ }
+ }
}
- const adapter = this.getAgentAdapter(agentRun.agentKey as AgentType);
- await adapter.cancel(agentRunId);
-
- await agentRunsRepository.update(agentRunId, {
- status: 'cancelled',
- finishedAt: new Date(),
- });
-
- // Get WorkItem to untrack task and release lock
- const workItem = await workItemsRepository.findById(agentRun.workItemId);
- if (workItem) {
- this.untrackRunningTask(workItem.projectId, agentRunId);
- await workItemsRepository.releaseLock(workItem.id, agentRunId);
- }
+ // Update task status to canceled
+ await tasksRepository.updateStatus(task.id, 'canceled');
}
/**
* Resume a task using the same session_id
+ * Triggers workflow by emitting workitem.task.resume event
*/
- async resumeTask(agentRunId: string, prompt: string): Promise {
- const agentRun = await agentRunsRepository.findById(agentRunId);
- if (!agentRun) {
- throw new Error('Agent run not found');
- }
-
- // Check if the original run has a session_id
- if (!agentRun.sessionId) {
- throw new Error('Cannot resume task: original task has no session_id');
- }
+ async resumeTask(taskId: string, prompt: string): Promise {
+ const { tasksRepository } = await import('../../repositories/TasksRepository.js');
+ const task = await tasksRepository.findById(taskId);
- const workItem = await workItemsRepository.findById(agentRun.workItemId);
- if (!workItem) {
- throw new Error('WorkItem not found');
- }
-
- const project = await projectsRepository.findById(workItem.projectId);
- if (!project) {
- throw new Error('Project not found');
+ if (!task) {
+ throw new Error('Task not found');
}
- const agentParams = this.parseAgentParams(project.agentParams);
- const agentType = agentParams.agentType || (project.defaultAgent as AgentType) || 'opencode';
- const adapter = this.getAgentAdapter(agentType);
- const config = this.buildAgentConfig(project, agentParams);
+ const agentRun = task.currentAgentRunId
+ ? ((await agentRunsRepository.findById(task.currentAgentRunId)) ?? null)
+ : null;
- // Check concurrency limit
- const canStart = await this.canStartTask(project.id);
- if (!canStart) {
- throw new Error(
- `Maximum agent concurrency limit (${project.maxAgentConcurrency || 3}) reached for project. Please wait for existing tasks to complete.`
- );
+ if (!agentRun || !agentRun.sessionId) {
+ throw new Error('Cannot resume task: task has no active agent run with session_id');
}
- // Extract original prompt from the original run
- let originalPrompt = '';
- try {
- const originalInputJson = JSON.parse(agentRun.inputJson) as {
- prompt?: string;
- config?: AgentConfig;
- };
- originalPrompt = originalInputJson.prompt || '';
-
- // Fallback to inputSummary if prompt is not available
- if (!originalPrompt && agentRun.inputSummary) {
- originalPrompt = agentRun.inputSummary;
- }
- } catch {
- // If JSON parsing fails, use inputSummary as fallback
- if (agentRun.inputSummary) {
- originalPrompt = agentRun.inputSummary;
- }
+ const workItem = await workItemsRepository.findById(task.workItemId);
+ if (!workItem) {
+ throw new Error('WorkItem not found');
}
- // Build resume prompt using markdown format
- const combinedPrompt = PromptBuilder.buildResumePrompt(
- originalPrompt || '',
- prompt,
- workItem.title
+ // Emit workitem.task.resume event to trigger workflow
+ console.log(
+ `[AgentService] Emitting workitem.task.resume event to resume task for ${workItem.id}`
);
- // Create new agent run record linked to the original
- const newRunId = uuidv4();
- const newAgentRun = await agentRunsRepository.create({
- id: newRunId,
+ await workflowEventBus.emit({
+ eventId: crypto.randomUUID(),
+ at: new Date().toISOString(),
+ subject: { kind: 'workitem', id: workItem.id },
+ type: 'workitem.task.resume',
workItemId: workItem.id,
- projectId: project.id,
- agentKey: agentType,
- inputSummary: combinedPrompt ? combinedPrompt.substring(0, 200) : undefined,
- inputJson: JSON.stringify({
- prompt: combinedPrompt,
- originalPrompt,
- newPrompt: prompt,
- config,
- }),
- sessionId: agentRun.sessionId, // Reuse the same session_id
- linkedAgentRunId: agentRunId, // Link to the original run
- });
-
- // Mark as running
- await agentRunsRepository.update(newRunId, {
- status: 'running',
- startedAt: new Date(),
- });
-
- // Track as running
- this.trackRunningTask(project.id, newRunId);
-
- // Execute agent with session continuation
- adapter
- .correctWithReviewComments({
- worktreePath: workItem.worktreePath || '',
- agentRunId: newRunId,
+ data: {
+ taskId: task.id,
+ originalAgentRunId: agentRun.id,
sessionId: agentRun.sessionId,
- reviewComments: combinedPrompt,
- config,
- })
- .catch(async (error: unknown) => {
- await agentRunsRepository.update(newRunId, {
- status: 'failed',
- log: `Failed to resume agent process: ${error instanceof Error ? error.message : String(error)}`,
- finishedAt: new Date(),
- });
- this.untrackRunningTask(project.id, newRunId);
- await workItemsRepository.releaseLock(workItem.id, newRunId);
- });
+ prompt,
+ title: workItem.title,
+ body: workItem.body ?? '',
+ },
+ });
- return newAgentRun;
+ // Return the original agent run (workflow will create a new one)
+ // This maintains API compatibility while letting workflow handle the resume
+ return agentRun;
}
/**
* Restart a task with the same prompt
+ * Canonical action: restart task. Emits workitem.restarted; workflow runs from
+ * workitem_restarted → process_workitem (agent).
*/
- async restartTask(agentRunId: string): Promise {
- const agentRun = await agentRunsRepository.findById(agentRunId);
- if (!agentRun) {
- throw new Error('Agent run not found');
+ async restartTask(taskId: string): Promise {
+ const { tasksRepository } = await import('../../repositories/TasksRepository.js');
+ const task = await tasksRepository.findById(taskId);
+
+ if (!task) {
+ throw new Error('Task not found');
}
- const workItem = await workItemsRepository.findById(agentRun.workItemId);
+ const workItem = await workItemsRepository.findById(task.workItemId);
if (!workItem) {
throw new Error('WorkItem not found');
}
- const project = await projectsRepository.findById(workItem.projectId);
- if (!project) {
- throw new Error('Project not found');
- }
-
- // Parse original input and extract prompt
- let prompt: string;
- try {
- const inputJson = JSON.parse(agentRun.inputJson) as { prompt?: string; config?: AgentConfig };
- // Try to get prompt from inputJson
- prompt = inputJson.prompt || '';
-
- // Fallback to inputSummary if prompt is not available
- if (!prompt && agentRun.inputSummary) {
- prompt = agentRun.inputSummary;
- }
-
- // Final fallback to workItem title
- if (!prompt && workItem.title) {
- prompt = workItem.title;
- }
-
- // If still no prompt, throw an error
- if (!prompt) {
- throw new Error('Cannot restart task: original prompt not found');
- }
- } catch (error) {
- // If JSON parsing fails or prompt extraction fails, use fallbacks
- if (agentRun.inputSummary) {
- prompt = agentRun.inputSummary;
- } else if (workItem.title) {
- prompt = workItem.title;
- } else {
- throw new Error('Cannot restart task: no prompt available');
- }
- }
+ console.log(`[AgentService] Emitting workitem.restarted event for ${workItem.id}`);
- const agentParams = this.parseAgentParams(project.agentParams);
+ await workflowEventBus.emit({
+ eventId: crypto.randomUUID(),
+ at: new Date().toISOString(),
+ subject: { kind: 'workitem', id: workItem.id },
+ type: 'workitem.restarted',
+ workItemId: workItem.id,
+ data: {
+ taskId: task.id,
+ taskType: task.taskType,
+ title: workItem.title,
+ body: workItem.body ?? '',
+ },
+ });
- // Start new agent run
- return await this.startAgentRun(workItem, project, prompt, agentParams);
+ return task;
}
/**
* Get task status
*/
- async getTaskStatus(agentRunId: string): Promise<{ status: string; agentRun: AgentRun }> {
- const agentRun = await agentRunsRepository.findById(agentRunId);
- if (!agentRun) {
- throw new Error('Agent run not found');
- }
-
- const adapter = this.getAgentAdapter(agentRun.agentKey as AgentType);
- const { status } = await adapter.getStatus(agentRunId);
-
- // Update status in database if it changed
- if (status !== agentRun.status) {
- await agentRunsRepository.update(agentRunId, {
- status,
- finishedAt: ['succeeded', 'failed', 'cancelled'].includes(status) ? new Date() : undefined,
- });
+ async getTaskStatus(
+ taskId: string
+ ): Promise<{ status: string; task: Task; agentRun?: AgentRun | null }> {
+ const { tasksRepository } = await import('../../repositories/TasksRepository.js');
+ const task = await tasksRepository.findById(taskId);
+ if (!task) {
+ throw new Error('Task not found');
+ }
+
+ let agentRun: AgentRun | null = null;
+ if (task.currentAgentRunId) {
+ agentRun = (await agentRunsRepository.findById(task.currentAgentRunId)) ?? null;
+ if (agentRun && agentRun.status === AGENT_RUN_STATUS_RUNNING) {
+ const adapter = this.getAgentAdapter(agentRun.agentKey as AgentType);
+ const { status } = await adapter.getStatus(agentRun.id);
+ const statusForCheck = status;
+
+ if (status !== agentRun.status) {
+ await agentRunsRepository.update(agentRun.id, {
+ status,
+ finishedAt: ['succeeded', 'failed', 'cancelled'].includes(status)
+ ? new Date()
+ : undefined,
+ });
- // Untrack if task is no longer running
- if (status !== 'running') {
- const workItem = await workItemsRepository.findById(agentRun.workItemId);
- if (workItem) {
- this.untrackRunningTask(workItem.projectId, agentRunId);
- await workItemsRepository.releaseLock(workItem.id, agentRunId);
+ // Update task status based on agent run status
+ if (status === 'succeeded') {
+ await tasksRepository.updateStatus(task.id, 'succeeded');
+ } else if (status === 'failed' || status === 'cancelled') {
+ await tasksRepository.updateStatus(task.id, 'failed');
+ }
+
+ if (statusForCheck !== AGENT_RUN_STATUS_RUNNING) {
+ const workItem = await workItemsRepository.findById(task.workItemId);
+ if (workItem) {
+ this.untrackRunningTask(workItem.projectId, agentRun.id);
+ await workItemsRepository.releaseLock(task.workItemId, agentRun.id);
+ }
+ }
+
+ agentRun = {
+ ...agentRun,
+ status,
+ };
}
}
}
+ const updatedTask = await tasksRepository.findById(taskId);
return {
- status,
- agentRun,
+ status: updatedTask?.status || task.status,
+ task: updatedTask || task,
+ agentRun: agentRun ?? null,
};
}
@@ -622,16 +562,25 @@ export class AgentService {
for (const workItem of workItems) {
const runs = await agentRunsRepository.findByWorkItemId(workItem.id);
- allAgentRuns.push(...runs.filter((run) => run.status === 'running'));
+ allAgentRuns.push(...runs.filter((run) => run.status === AGENT_RUN_STATUS_RUNNING));
}
return allAgentRuns;
}
+ /**
+ * Get all tasks for a work item
+ * Returns Tasks (Domain resources), not AgentRuns
+ */
+ async getWorkItemTasks(workItemId: string): Promise {
+ const { tasksRepository } = await import('../../repositories/TasksRepository.js');
+ return await tasksRepository.findByWorkItemId(workItemId);
+ }
+
/**
* Get all agent runs for a work item
*/
- async getWorkItemTasks(workItemId: string): Promise {
+ async getWorkItemAgentRuns(workItemId: string): Promise {
return await agentRunsRepository.findByWorkItemId(workItemId);
}
@@ -691,7 +640,7 @@ export class AgentService {
// Mark as running
await agentRunsRepository.update(newRunId, {
- status: 'running',
+ status: AGENT_RUN_STATUS_RUNNING,
startedAt: new Date(),
});
@@ -744,21 +693,19 @@ export class AgentService {
const existingStatus = agentRun.status;
try {
- // Stage all changes first (including new files)
- // This is necessary because new files won't show up in git diff until staged
- gitService.stageAllChanges(workItem.worktreePath);
+ // Agent is expected to commit files itself, so we don't stage or commit automatically
+ // Just check what the agent has already committed
+ const headShaAfter = gitService.getHeadSha(workItem.worktreePath);
+ const headShaBefore = agentRun.headShaBefore || workItem.baseSha || headShaAfter;
- // Check if there are staged changes after staging
- const hasStagedChanges = gitService.hasStagedChanges(workItem.worktreePath);
+ // Check if agent made any new commits
+ const hasNewCommits = headShaBefore !== headShaAfter;
let commitSha: string | null = null;
- let headShaAfter: string;
- if (hasStagedChanges) {
- // Commit if changes exist
- const commitMessage = `AgentRun ${agentRunId}: ${agentRun.inputSummary || 'Agent execution'}`;
- commitSha = gitService.commitChanges(workItem.worktreePath, commitMessage);
- headShaAfter = gitService.getHeadSha(workItem.worktreePath);
+ if (hasNewCommits) {
+ // Agent has made commits - use the latest commit SHA
+ commitSha = headShaAfter;
// Check if there's an actual diff between base and head (to avoid creating PRs with no changes)
if (!workItem.baseSha) {
@@ -767,12 +714,10 @@ export class AgentService {
const diff = gitService.getDiff(workItem.baseSha, headShaAfter, workItem.worktreePath);
const hasActualChanges = diff.trim().length > 0;
- if (hasActualChanges) {
- // Create PR only if there are actual changes
- await this.openPRForWorkItem(workItem, project);
- } else {
- // No actual changes in diff - close any existing PR and update agent run log
- await this.closeExistingPRIfNoDiff(workItem, headShaAfter);
+ // Don't automatically create PR - workflow will handle PR creation
+ // Just return information about whether changes exist
+ if (!hasActualChanges) {
+ // No actual changes in diff - update agent run log
const noChangesMessage =
'\n\n[Finalization] No changes detected in diff - PR creation skipped.';
await agentRunsRepository.update(agentRunId, {
@@ -780,29 +725,68 @@ export class AgentService {
});
}
} else {
- // No staged changes - close any existing PR and update agent run log
- headShaAfter = gitService.getHeadSha(workItem.worktreePath);
- await this.closeExistingPRIfNoDiff(workItem, headShaAfter);
- const noChangesMessage = '\n\n[Finalization] No changes detected - PR creation skipped.';
- await agentRunsRepository.update(agentRunId, {
- log: (agentRun.log ?? '') + noChangesMessage,
- });
+ // No new commits from agent - check if there are unstaged changes
+ const hasUnstagedChanges = gitService.hasUnstagedChanges(workItem.worktreePath);
+ const hasStagedChanges = gitService.hasStagedChanges(workItem.worktreePath);
+
+ if (hasUnstagedChanges || hasStagedChanges) {
+ // Agent didn't commit changes but there are changes present
+ const noCommitMessage =
+ '\n\n[Finalization] Agent did not commit changes, but changes are present in working directory.';
+ await agentRunsRepository.update(agentRunId, {
+ log: (agentRun.log ?? '') + noCommitMessage,
+ });
+ } else {
+ // No changes at all
+ const noChangesMessage = '\n\n[Finalization] No changes detected - PR creation skipped.';
+ await agentRunsRepository.update(agentRunId, {
+ log: (agentRun.log ?? '') + noChangesMessage,
+ });
+ }
}
// Update AgentRun - preserve existing status unless finalization fails
await agentRunsRepository.update(agentRunId, {
- // Only update status if it's still 'running' (shouldn't happen, but be safe)
- // Otherwise preserve the status set by the adapter (succeeded/failed)
- status: existingStatus === 'running' ? 'succeeded' : existingStatus,
+ status:
+ existingStatus === AGENT_RUN_STATUS_RUNNING ? AGENT_RUN_STATUS_SUCCEEDED : existingStatus,
finishedAt: agentRun.finishedAt || new Date(),
headShaAfter,
commitSha,
});
- // Update WorkItem cached head SHA
- await workItemsRepository.update(workItem.id, {
- headSha: headShaAfter,
- });
+ const status =
+ agentRun.status === 'succeeded'
+ ? 'succeeded'
+ : agentRun.status === 'failed'
+ ? 'failed'
+ : agentRun.status === 'cancelled'
+ ? 'canceled'
+ : null;
+ const outcomeStatus: 'succeeded' | 'failed' | 'canceled' = status ?? 'failed';
+ const outcome = {
+ resourceType: 'AgentRun' as const,
+ resourceId: agentRunId,
+ status: outcomeStatus,
+ summary: `AgentRun ${agentRunId} completed (${status ?? 'unknown'})`,
+ outputs: {
+ agentRunId,
+ taskId: agentRun.taskId,
+ sessionId: agentRun.sessionId,
+ commitSha,
+ headShaAfter,
+ },
+ };
+ const complete = getAndRemoveAgentRunCompletionCallback(agentRunId);
+ if (complete) {
+ try {
+ await complete(outcome);
+ } catch (error) {
+ console.error(
+ `[AgentService] Failed to complete NodeRun for AgentRun ${agentRunId}:`,
+ error
+ );
+ }
+ }
// PR head SHA is tracked in WorkItem, not in PR schema
// PR only stores sourceBranch and targetBranch references
diff --git a/backend/src/services/ClaudeCodeAgentAdapter.ts b/backend/src/services/agent/ClaudeCodeAgentAdapter.ts
similarity index 85%
rename from backend/src/services/ClaudeCodeAgentAdapter.ts
rename to backend/src/services/agent/ClaudeCodeAgentAdapter.ts
index b3da44e..9ac13fe 100644
--- a/backend/src/services/ClaudeCodeAgentAdapter.ts
+++ b/backend/src/services/agent/ClaudeCodeAgentAdapter.ts
@@ -1,9 +1,14 @@
+import { v4 as uuidv4 } from 'uuid';
+import path from 'node:path';
+import { homedir } from 'node:os';
+import { promises as fs } from 'node:fs';
import {
AgentAdapter,
type AgentModel,
type AgentRunParams,
type AgentCorrectionParams,
} from './AgentAdapter.js';
+import { agentRunsRepository } from '../../repositories/AgentRunsRepository.js';
interface ClaudeCodeSession {
id: string;
@@ -92,7 +97,6 @@ export class ClaudeCodeAgentAdapter extends AgentAdapter {
const { logBuffer, append } = this.createOutputHandler(logFile);
// Get sessionId from the agent run record
- const { agentRunsRepository } = await import('../repositories/AgentRunsRepository.js');
const agentRun = await agentRunsRepository.findById(runId);
let sessionId = agentRun?.sessionId;
@@ -100,7 +104,6 @@ export class ClaudeCodeAgentAdapter extends AgentAdapter {
// If sessionId is not a valid UUID (e.g., starts with "wi-"), generate a new UUID
const uuidRegex = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i;
if (!sessionId || !uuidRegex.test(sessionId)) {
- const { v4: uuidv4 } = await import('uuid');
sessionId = uuidv4();
// Update the database with the generated UUID session ID
await agentRunsRepository.update(runId, {
@@ -113,7 +116,7 @@ export class ClaudeCodeAgentAdapter extends AgentAdapter {
const args = this.buildCommandArgs('-p', {
model: config.model,
agent: config.agent,
- 'session-id': sessionId,
+ 'session-id': sessionId ?? undefined,
});
if (config.baseArgs) {
@@ -125,6 +128,15 @@ export class ClaudeCodeAgentAdapter extends AgentAdapter {
const child = this.spawnProcess(config.executablePath, args, { cwd: worktreePath });
+ // Store PID in memory cache and persist to database
+ if (child.pid) {
+ this.processPids.set(runId, child.pid);
+ await agentRunsRepository.update(runId, {
+ pid: child.pid,
+ });
+ console.log(`[ClaudeCodeAgent] Stored PID ${child.pid} for run ${runId}`);
+ }
+
child.stdout?.on('data', append);
child.stderr?.on('data', append);
@@ -163,6 +175,15 @@ export class ClaudeCodeAgentAdapter extends AgentAdapter {
const child = this.spawnProcess(config.executablePath, args, { cwd: worktreePath });
+ // Store PID in memory cache and persist to database
+ if (child.pid) {
+ this.processPids.set(runId, child.pid);
+ await agentRunsRepository.update(runId, {
+ pid: child.pid,
+ });
+ console.log(`[ClaudeCodeAgent] Stored PID ${child.pid} for correction run ${runId}`);
+ }
+
child.stdout?.on('data', append);
child.stderr?.on('data', append);
@@ -183,9 +204,6 @@ export class ClaudeCodeAgentAdapter extends AgentAdapter {
try {
// Claude Code stores sessions in ~/.claude/sessions
// We can list them by reading the directory
- const { homedir } = await import('node:os');
- const { promises: fs } = await import('node:fs');
- const path = await import('node:path');
const sessionsDir = path.join(homedir(), '.claude', 'sessions');
const entries = await fs.readdir(sessionsDir, { withFileTypes: true });
@@ -203,12 +221,12 @@ export class ClaudeCodeAgentAdapter extends AgentAdapter {
sessions.push({
id: entry.name,
- name: sessionData.name || sessionData.title || entry.name,
- createdAt: sessionData.createdAt,
- updatedAt: sessionData.updatedAt,
- status: sessionData.status,
- model: sessionData.model,
- agent: sessionData.agent,
+ name: (sessionData.name || sessionData.title || entry.name) ?? undefined,
+ createdAt: sessionData.createdAt ?? undefined,
+ updatedAt: sessionData.updatedAt ?? undefined,
+ status: sessionData.status ?? undefined,
+ model: sessionData.model ?? undefined,
+ agent: sessionData.agent ?? undefined,
});
} catch {
// Skip sessions that can't be read
diff --git a/backend/src/services/OpenCodeAgentAdapter.ts b/backend/src/services/agent/OpenCodeAgentAdapter.ts
similarity index 68%
rename from backend/src/services/OpenCodeAgentAdapter.ts
rename to backend/src/services/agent/OpenCodeAgentAdapter.ts
index fe0c22e..1ba276e 100644
--- a/backend/src/services/OpenCodeAgentAdapter.ts
+++ b/backend/src/services/agent/OpenCodeAgentAdapter.ts
@@ -1,3 +1,4 @@
+import { agentRunsRepository } from '../../repositories/AgentRunsRepository.js';
import {
AgentAdapter,
type AgentModel,
@@ -55,7 +56,7 @@ export class OpenCodeAgentAdapter extends AgentAdapter {
async getModels(): Promise {
console.log('[OpenCodeAgent] Fetching available models...');
try {
- const { stdout } = this.execCommand('opencode models');
+ const { stdout } = await this.execCommandAsync('opencode models');
const models = this.parseModelsFromOutput(stdout);
console.log(`[OpenCodeAgent] Found ${models.length} available models`);
return models;
@@ -66,7 +67,7 @@ export class OpenCodeAgentAdapter extends AgentAdapter {
}
async run(params: OpenCodeAgentRunParams): Promise<{ runId: string; sessionId?: string }> {
- const { worktreePath, agentRunId, prompt, config } = params;
+ const { worktreePath, agentRunId, prompt, config, sessionId } = params;
const runId = agentRunId;
console.log(`[OpenCodeAgent] Starting run ${runId}`);
@@ -76,13 +77,43 @@ export class OpenCodeAgentAdapter extends AgentAdapter {
console.log(`[OpenCodeAgent] Prompt length: ${prompt.length} characters`);
try {
+ // When reusing a session (e.g. craft_commit), use it and pass --session to opencode
+ let initialSessionId: string | null = null;
+ if (sessionId) {
+ initialSessionId = sessionId;
+ console.log(`[OpenCodeAgent] Reusing session: ${sessionId}`);
+ } else {
+ try {
+ console.log(`[OpenCodeAgent] Listing sessions before starting run ${runId}...`);
+ const sessions = await this.listSessions(worktreePath);
+ if (sessions.length > 0) {
+ const latestSession = sessions[0]; // Most recent session
+ initialSessionId = latestSession.id;
+ console.log(
+ `[OpenCodeAgent] Found existing session: ${initialSessionId}, persisting...`
+ );
+ this.cacheSession(runId, latestSession);
+ await this.saveSessionToDatabase(runId, latestSession);
+ await agentRunsRepository.update(runId, {
+ sessionId: initialSessionId,
+ });
+ console.log(
+ `[OpenCodeAgent] Persisted existing session ${initialSessionId} to database`
+ );
+ } else {
+ console.log(`[OpenCodeAgent] No existing sessions found, sessionId will be null`);
+ }
+ } catch (error) {
+ console.error(`[OpenCodeAgent] Failed to list sessions before start:`, error);
+ }
+ }
+
const { stdoutFile, stderrFile } = await this.createStdoutStderrFiles(runId);
const stdoutPath = await this.getStdoutPath(runId);
const stderrPath = await this.getStderrPath(runId);
console.log(`[OpenCodeAgent] Log files created: stdout=${stdoutPath}, stderr=${stderrPath}`);
// Update database with log file paths immediately so SSE streaming can work
- const { agentRunsRepository } = await import('../repositories/AgentRunsRepository.js');
await agentRunsRepository.update(runId, {
stdoutPath,
stderrPath,
@@ -92,10 +123,11 @@ export class OpenCodeAgentAdapter extends AgentAdapter {
const { stdoutBuffer, stderrBuffer, appendStdout, appendStderr } =
this.createStdoutStderrHandlers(stdoutFile, stderrFile);
- // Build args for opencode run command
+ // Build args for opencode run command (pass session when reusing for craft_commit etc.)
const args = this.buildCommandArgs('run', {
model: config.model,
agent: config.agent,
+ session: sessionId || undefined,
});
if (config.baseArgs) {
@@ -123,6 +155,61 @@ export class OpenCodeAgentAdapter extends AgentAdapter {
const child = this.spawnProcess(config.executablePath, args, { cwd: worktreePath });
console.log(`[OpenCodeAgent] Process spawned with PID: ${child.pid}`);
+ // Store PID in memory cache and persist to database
+ if (child.pid) {
+ this.processPids.set(runId, child.pid);
+ await agentRunsRepository.update(runId, {
+ pid: child.pid,
+ });
+ console.log(`[OpenCodeAgent] Stored PID ${child.pid} for run ${runId}`);
+ }
+
+ // Start session polling to persist session updates during execution
+ // Poll until we get a new session (different from initial) or task ends
+ let sessionPollingInterval: NodeJS.Timeout | null = null;
+ const startSessionPolling = () => {
+ sessionPollingInterval = setInterval(async () => {
+ try {
+ // Check if process is still running
+ if (!child.pid || child.killed) {
+ console.log(`[OpenCodeAgent] Process no longer running, stopping session polling`);
+ if (sessionPollingInterval) {
+ clearInterval(sessionPollingInterval);
+ sessionPollingInterval = null;
+ }
+ return;
+ }
+
+ const sessions = await this.listSessions(worktreePath);
+ if (sessions.length > 0) {
+ const latestSession = sessions[0];
+ // If we found a new session (different from initial), persist it and stop polling
+ if (latestSession.id !== initialSessionId) {
+ console.log(
+ `[OpenCodeAgent] New session detected: ${latestSession.id} (was: ${initialSessionId})`
+ );
+ this.cacheSession(runId, latestSession);
+ await this.saveSessionToDatabase(runId, latestSession);
+ await agentRunsRepository.update(runId, {
+ sessionId: latestSession.id,
+ });
+ console.log(
+ `[OpenCodeAgent] Persisted new session ${latestSession.id} to database, stopping polling`
+ );
+ // Stop polling once we found the new session
+ if (sessionPollingInterval) {
+ clearInterval(sessionPollingInterval);
+ sessionPollingInterval = null;
+ }
+ }
+ }
+ } catch (error) {
+ console.error(`[OpenCodeAgent] Error during session polling:`, error);
+ }
+ }, 5000); // Poll every 5 seconds
+ };
+ startSessionPolling();
+
let outputCount = 0;
child.stdout?.on('data', (chunk) => {
outputCount++;
@@ -142,6 +229,13 @@ export class OpenCodeAgentAdapter extends AgentAdapter {
});
child.on('close', async (code) => {
+ // Stop session polling
+ if (sessionPollingInterval) {
+ clearInterval(sessionPollingInterval);
+ sessionPollingInterval = null;
+ console.log(`[OpenCodeAgent] Stopped session polling for run ${runId}`);
+ }
+
console.log(`[OpenCodeAgent] Run ${runId} process closed with exit code: ${code}`);
console.log(`[OpenCodeAgent] Total output chunks received: ${outputCount}`);
@@ -154,8 +248,8 @@ export class OpenCodeAgentAdapter extends AgentAdapter {
stdoutFile,
stderrFile,
async () => {
- // List sessions and save the latest one
- console.log(`[OpenCodeAgent] Listing sessions for run ${runId}...`);
+ // Final session check and save
+ console.log(`[OpenCodeAgent] Final session check for run ${runId}...`);
try {
const sessions = await this.listSessions(worktreePath);
console.log(`[OpenCodeAgent] Found ${sessions.length} sessions`);
@@ -165,15 +259,21 @@ export class OpenCodeAgentAdapter extends AgentAdapter {
this.cacheSession(runId, latestSession);
await this.saveSessionToDatabase(runId, latestSession);
// Update the sessionId field in the database with the actual opencode session ID
- const { agentRunsRepository } =
- await import('../repositories/AgentRunsRepository.js');
await agentRunsRepository.update(runId, {
sessionId: latestSession.id,
});
- console.log(`[OpenCodeAgent] Session ID updated in database: ${latestSession.id}`);
+ console.log(
+ `[OpenCodeAgent] Final session ID updated in database: ${latestSession.id}`
+ );
+ } else {
+ // No session found - ensure it's set to null
+ await agentRunsRepository.update(runId, {
+ sessionId: null,
+ });
+ console.log(`[OpenCodeAgent] No session found, set sessionId to null`);
}
} catch (error) {
- console.error('[OpenCodeAgent] Failed to list sessions:', error);
+ console.error('[OpenCodeAgent] Failed to list sessions on close:', error);
}
}
);
@@ -210,7 +310,6 @@ export class OpenCodeAgentAdapter extends AgentAdapter {
console.log(`[OpenCodeAgent] Log files created: stdout=${stdoutPath}, stderr=${stderrPath}`);
// Update database with log file paths immediately so SSE streaming can work
- const { agentRunsRepository } = await import('../repositories/AgentRunsRepository.js');
await agentRunsRepository.update(runId, {
stdoutPath,
stderrPath,
@@ -238,6 +337,15 @@ export class OpenCodeAgentAdapter extends AgentAdapter {
const child = this.spawnProcess(config.executablePath, args, { cwd: worktreePath });
console.log(`[OpenCodeAgent] Correction process spawned with PID: ${child.pid}`);
+ // Store PID in memory cache and persist to database
+ if (child.pid) {
+ this.processPids.set(runId, child.pid);
+ await agentRunsRepository.update(runId, {
+ pid: child.pid,
+ });
+ console.log(`[OpenCodeAgent] Stored PID ${child.pid} for correction run ${runId}`);
+ }
+
let outputCount = 0;
child.stdout?.on('data', (chunk) => {
outputCount++;
@@ -289,7 +397,7 @@ export class OpenCodeAgentAdapter extends AgentAdapter {
async listSessions(worktreePath: string): Promise {
console.log(`[OpenCodeAgent] Listing sessions for worktree: ${worktreePath}`);
try {
- const { stdout } = this.execCommand('opencode session list --format json', {
+ const { stdout } = await this.execCommandAsync('opencode session list --format json', {
cwd: worktreePath,
});
diff --git a/backend/src/services/agent/index.ts b/backend/src/services/agent/index.ts
new file mode 100644
index 0000000..9763a7f
--- /dev/null
+++ b/backend/src/services/agent/index.ts
@@ -0,0 +1,21 @@
+export {
+ AgentAdapter,
+ type AgentModel,
+ type AgentRunParams,
+ type AgentCorrectionParams,
+ type AgentStatus,
+ type AgentConfig,
+ type ProcessOutput,
+ type SessionData,
+} from './AgentAdapter.js';
+export { AgentRunRecoveryService, agentRunRecoveryService } from './AgentRunRecoveryService.js';
+export {
+ AgentService,
+ agentService,
+ type AgentType,
+ type AgentConfig as AgentServiceConfig,
+ type AgentParams,
+ type TaskExecutionResult,
+} from './AgentService.js';
+export { ClaudeCodeAgentAdapter, claudeCodeAgentAdapter } from './ClaudeCodeAgentAdapter.js';
+export { OpenCodeAgentAdapter, openCodeAgentAdapter } from './OpenCodeAgentAdapter.js';
diff --git a/backend/src/services/GitCommitService.ts b/backend/src/services/git/GitCommitService.ts
similarity index 100%
rename from backend/src/services/GitCommitService.ts
rename to backend/src/services/git/GitCommitService.ts
diff --git a/backend/src/services/GitFileService.ts b/backend/src/services/git/GitFileService.ts
similarity index 100%
rename from backend/src/services/GitFileService.ts
rename to backend/src/services/git/GitFileService.ts
diff --git a/backend/src/services/git/GitMirrorService.ts b/backend/src/services/git/GitMirrorService.ts
new file mode 100644
index 0000000..4d9453a
--- /dev/null
+++ b/backend/src/services/git/GitMirrorService.ts
@@ -0,0 +1,261 @@
+import fs from 'node:fs/promises';
+import path from 'node:path';
+
+/**
+ * Service for managing bare Git mirror repositories
+ * Mirror repos act as an intermediate layer between source and relay repos
+ */
+export class GitMirrorService {
+ constructor(
+ private execCommand: (command: string, cwd: string) => string,
+ private getDefaultBranch: (repoPath: string) => string
+ ) {}
+
+ /**
+ * Get the path for a mirror repo based on source repo path
+ * Multiple projects with the same source path share the same mirror repo
+ * Uses a hash of the normalized source path to create a unique identifier
+ */
+ getMirrorRepoPath(mirrorsDir: string, sourceRepoPath: string): string {
+ // Normalize the source path to handle different path formats
+ const normalizedPath = path.resolve(sourceRepoPath).replace(/\\/g, '/');
+
+ // Create a hash from the normalized path
+ // Using a simple hash function (could use crypto.createHash for stronger hashing)
+ let hash = 0;
+ for (let i = 0; i < normalizedPath.length; i++) {
+ const char = normalizedPath.charCodeAt(i);
+ hash = (hash << 5) - hash + char;
+ hash = hash & hash; // Convert to 32-bit integer
+ }
+
+ // Use absolute value and convert to hex for filename-safe string
+ const hashStr = Math.abs(hash).toString(16).padStart(8, '0');
+
+ // Create a safe directory name from the last part of the path
+ const pathParts = normalizedPath.split('/').filter((p) => p.length > 0);
+ const lastPart = pathParts[pathParts.length - 1] || 'repo';
+ const safeName = lastPart.replace(/[^a-zA-Z0-9._-]/g, '_');
+
+ // Combine hash and safe name for uniqueness and readability
+ return path.join(mirrorsDir, `${safeName}-${hashStr}.git`);
+ }
+
+ /**
+ * Ensure mirror repo exists and is initialized as a bare repository
+ * If it doesn't exist, create it from the source repo
+ * Multiple projects with the same source path will share the same mirror repo
+ */
+ async ensureMirrorRepo(mirrorsDir: string, sourceRepoPath: string): Promise {
+ // Ensure mirrors directory exists
+ await fs.mkdir(mirrorsDir, { recursive: true });
+
+ const mirrorRepoPath = this.getMirrorRepoPath(mirrorsDir, sourceRepoPath);
+
+ // Check if mirror repo already exists
+ try {
+ await fs.access(mirrorRepoPath);
+ // Verify it's a valid bare repo
+ try {
+ const isBare = this.execCommand(
+ 'git rev-parse --is-bare-repository',
+ mirrorRepoPath
+ ).trim();
+ if (isBare !== 'true') {
+ // Not a bare repo, recreate it
+ await fs.rm(mirrorRepoPath, { recursive: true, force: true });
+ await this.createMirrorRepo(mirrorRepoPath, sourceRepoPath);
+ }
+ } catch {
+ // Not a valid git repo, recreate it
+ await fs.rm(mirrorRepoPath, { recursive: true, force: true });
+ await this.createMirrorRepo(mirrorRepoPath, sourceRepoPath);
+ }
+ } catch {
+ // Mirror repo doesn't exist, create it
+ await this.createMirrorRepo(mirrorRepoPath, sourceRepoPath);
+ }
+
+ return mirrorRepoPath;
+ }
+
+ /**
+ * Create a new bare mirror repository from source repo
+ */
+ private async createMirrorRepo(mirrorRepoPath: string, sourceRepoPath: string): Promise {
+ // Use git clone --bare to create mirror repo from source
+ // This is more reliable than init + fetch for local repos
+ const normalizedSourcePath = path.resolve(sourceRepoPath).replace(/\\/g, '/');
+ const sourceUrl =
+ process.platform === 'win32' ? normalizedSourcePath : `file://${normalizedSourcePath}`;
+
+ try {
+ // Try clone --bare (preferred method)
+ this.execCommand(`git clone --bare "${sourceUrl}" "${mirrorRepoPath}"`, process.cwd());
+ } catch {
+ // Fallback: create bare repo and fetch
+ await fs.mkdir(mirrorRepoPath, { recursive: true });
+ this.execCommand('git init --bare', mirrorRepoPath);
+
+ // Add source repo as remote using file:// protocol for local paths
+ const remoteUrl =
+ process.platform === 'win32' ? normalizedSourcePath : `file://${normalizedSourcePath}`;
+
+ try {
+ this.execCommand(`git remote add source "${remoteUrl}"`, mirrorRepoPath);
+ } catch {
+ // Remote might already exist, try to set URL
+ this.execCommand(`git remote set-url source "${remoteUrl}"`, mirrorRepoPath);
+ }
+
+ // Fetch all branches and tags from source
+ this.execCommand('git fetch source --all --tags', mirrorRepoPath);
+ }
+
+ // Get default branch from source
+ const defaultBranch = this.getDefaultBranch(sourceRepoPath);
+
+ // Set default branch in mirror
+ try {
+ this.execCommand(`git symbolic-ref HEAD refs/heads/${defaultBranch}`, mirrorRepoPath);
+ } catch {
+ // If default branch doesn't exist in mirror, use main/master
+ try {
+ this.execCommand('git symbolic-ref HEAD refs/heads/main', mirrorRepoPath);
+ } catch {
+ this.execCommand('git symbolic-ref HEAD refs/heads/master', mirrorRepoPath);
+ }
+ }
+ }
+
+ /**
+ * Push from source repo to mirror repo using namespaced refs
+ * Uses format: refs/heads/gv//tracking/
+ */
+ async pushSourceToMirror(
+ sourceRepoPath: string,
+ mirrorRepoPath: string,
+ branch: string,
+ projectId: string
+ ): Promise {
+ // Normalize paths for remote URL
+ const normalizedMirrorPath = path.resolve(mirrorRepoPath).replace(/\\/g, '/');
+ const mirrorUrl =
+ process.platform === 'win32' ? normalizedMirrorPath : `file://${normalizedMirrorPath}`;
+
+ // Ensure mirror remote exists in source repo
+ try {
+ this.execCommand(`git remote add mirror "${mirrorUrl}"`, sourceRepoPath);
+ } catch {
+ // Remote exists, update URL
+ this.execCommand(`git remote set-url mirror "${mirrorUrl}"`, sourceRepoPath);
+ }
+
+ // Push branch to mirror using namespaced ref
+ const namespacedRef = `refs/heads/gv/${projectId}/tracking/${branch}`;
+ this.execCommand(`git push mirror ${branch}:${namespacedRef}`, sourceRepoPath);
+
+ // Push all tags
+ try {
+ this.execCommand('git push mirror --tags', sourceRepoPath);
+ } catch {
+ // No tags to push, continue
+ }
+ }
+
+ /**
+ * Fetch namespaced ref from mirror and update relay repo branch
+ * Uses explicit fetch + reset for deterministic behavior
+ */
+ async fetchMirrorRefToRelay(
+ mirrorRepoPath: string,
+ relayRepoPath: string,
+ branch: string,
+ projectId: string,
+ refType: 'tracking' | 'relay' = 'tracking'
+ ): Promise {
+ // Normalize paths for remote URL
+ const normalizedMirrorPath = path.resolve(mirrorRepoPath).replace(/\\/g, '/');
+ const mirrorUrl =
+ process.platform === 'win32' ? normalizedMirrorPath : `file://${normalizedMirrorPath}`;
+
+ // Ensure mirror remote exists in relay repo
+ try {
+ this.execCommand(`git remote add mirror "${mirrorUrl}"`, relayRepoPath);
+ } catch {
+ // Remote exists, update URL
+ this.execCommand(`git remote set-url mirror "${mirrorUrl}"`, relayRepoPath);
+ }
+
+ // Build namespaced ref path
+ const namespacedRef =
+ refType === 'tracking'
+ ? `refs/heads/gv/${projectId}/tracking/${branch}`
+ : `refs/heads/gv/${projectId}/relay`;
+ const remoteRef = `refs/remotes/mirror/gv/${projectId}/${refType === 'tracking' ? `tracking/${branch}` : 'relay'}`;
+
+ // Fetch specific namespaced ref (explicit fetch, no pull)
+ this.execCommand(`git fetch mirror ${namespacedRef}:${remoteRef}`, relayRepoPath);
+
+ // Checkout branch if needed
+ try {
+ this.execCommand(`git checkout ${branch}`, relayRepoPath);
+ } catch {
+ // Branch doesn't exist locally, create it from mirror
+ this.execCommand(`git checkout -B ${branch} ${remoteRef}`, relayRepoPath);
+ }
+
+ // Reset to match mirror (deterministic, explicit reset)
+ this.execCommand(`git reset --hard ${remoteRef}`, relayRepoPath);
+ this.execCommand('git clean -fd', relayRepoPath);
+ }
+
+ /**
+ * Push relay integration branch to mirror repo using namespaced refs
+ * Uses format: refs/heads/gv//relay
+ */
+ async pushRelayToMirror(
+ relayRepoPath: string,
+ mirrorRepoPath: string,
+ branch: string,
+ projectId: string
+ ): Promise {
+ // Normalize paths for remote URL
+ const normalizedMirrorPath = path.resolve(mirrorRepoPath).replace(/\\/g, '/');
+ const mirrorUrl =
+ process.platform === 'win32' ? normalizedMirrorPath : `file://${normalizedMirrorPath}`;
+
+ // Ensure mirror remote exists in relay repo
+ try {
+ this.execCommand(`git remote add mirror "${mirrorUrl}"`, relayRepoPath);
+ } catch {
+ // Remote exists, update URL
+ this.execCommand(`git remote set-url mirror "${mirrorUrl}"`, relayRepoPath);
+ }
+
+ // Push relay branch to mirror using namespaced ref
+ const namespacedRef = `refs/heads/gv/${projectId}/relay`;
+ this.execCommand(`git push mirror ${branch}:${namespacedRef}`, relayRepoPath);
+
+ // Push all tags
+ try {
+ this.execCommand('git push mirror --tags', relayRepoPath);
+ } catch {
+ // No tags to push, continue
+ }
+ }
+
+ /**
+ * Delete mirror repo
+ * Note: Only deletes if no other projects share this source path
+ * Caller should check if other projects use the same source path before deleting
+ */
+ async deleteMirrorRepo(mirrorsDir: string, sourceRepoPath: string): Promise {
+ const mirrorRepoPath = this.getMirrorRepoPath(mirrorsDir, sourceRepoPath);
+ try {
+ await fs.rm(mirrorRepoPath, { recursive: true, force: true });
+ } catch {
+ // Mirror repo may not exist, ignore error
+ }
+ }
+}
diff --git a/backend/src/services/git/GitRelayService.ts b/backend/src/services/git/GitRelayService.ts
new file mode 100644
index 0000000..71193aa
--- /dev/null
+++ b/backend/src/services/git/GitRelayService.ts
@@ -0,0 +1,219 @@
+import fs from 'node:fs/promises';
+import path from 'node:path';
+import { GitMirrorService } from './GitMirrorService.js';
+
+/**
+ * Service for Git relay repository operations
+ * Uses mirror repo as intermediate layer: source <-> mirror <-> relay
+ */
+export class GitRelayService {
+ private mirrorService: GitMirrorService;
+
+ constructor(
+ private execCommand: (command: string, cwd: string) => string,
+ private getDefaultBranch: (repoPath: string) => string,
+ private mirrorsDir: string
+ ) {
+ this.mirrorService = new GitMirrorService(execCommand, getDefaultBranch);
+ }
+
+ async createRelayRepo(
+ sourceRepoPath: string,
+ relayRepoPath: string,
+ mirrorRepoPath: string,
+ projectId: string,
+ branch?: string
+ ): Promise {
+ // Use provided branch or get the default branch from source repo
+ const defaultBranch = branch || this.getDefaultBranch(sourceRepoPath);
+
+ // Step 1: Ensure mirror repo exists (using provided mirrorRepoPath)
+ await this.mirrorService.ensureMirrorRepo(this.mirrorsDir, sourceRepoPath);
+
+ // Step 2: Push source default branch to mirror using namespaced ref
+ await this.mirrorService.pushSourceToMirror(
+ sourceRepoPath,
+ mirrorRepoPath,
+ defaultBranch,
+ projectId
+ );
+
+ // Step 3: Create relay repo directory
+ await fs.mkdir(relayRepoPath, { recursive: true });
+
+ // Step 4: Initialize relay repo
+ this.execCommand('git init', relayRepoPath);
+
+ // Step 5: Add mirror as remote
+ const normalizedMirrorPath = path.resolve(mirrorRepoPath).replace(/\\/g, '/');
+ const mirrorUrl =
+ process.platform === 'win32' ? normalizedMirrorPath : `file://${normalizedMirrorPath}`;
+ try {
+ this.execCommand(`git remote add mirror "${mirrorUrl}"`, relayRepoPath);
+ } catch {
+ // Remote exists, update URL
+ this.execCommand(`git remote set-url mirror "${mirrorUrl}"`, relayRepoPath);
+ }
+
+ // Step 6: Fetch namespaced tracking ref from mirror
+ const namespacedRef = `refs/heads/gv/${projectId}/tracking/${defaultBranch}`;
+ const remoteRef = `refs/remotes/mirror/gv/${projectId}/tracking/${defaultBranch}`;
+ this.execCommand(`git fetch mirror ${namespacedRef}:${remoteRef}`, relayRepoPath);
+
+ // Step 7: Create local default branch from mirror tracking
+ this.execCommand(`git checkout -B ${defaultBranch} ${remoteRef}`, relayRepoPath);
+
+ // Step 8: Create relay integration branch from default
+ this.execCommand(`git checkout -B relay ${defaultBranch}`, relayRepoPath);
+
+ // Step 9: Push relay branch to mirror using namespaced ref
+ await this.mirrorService.pushRelayToMirror(relayRepoPath, mirrorRepoPath, 'relay', projectId);
+
+ // Step 10: Remove origin remote if it exists (to prevent accidental pushes)
+ try {
+ this.execCommand('git remote remove origin', relayRepoPath);
+ } catch {
+ // Origin remote may not exist, continue silently
+ }
+ }
+
+ async syncRelayToSource(
+ relayRepoPath: string,
+ sourceRepoPath: string,
+ mirrorRepoPath: string,
+ projectId: string
+ ): Promise {
+ // Get the default branch from source repo
+ const defaultBranch = this.getDefaultBranch(sourceRepoPath);
+
+ // Phase 0: Push relay integration branch to mirror (work branch merge into relay is done by workflow/PR merge before sync)
+ this.execCommand('git checkout relay', relayRepoPath);
+ await this.mirrorService.pushRelayToMirror(relayRepoPath, mirrorRepoPath, 'relay', projectId);
+
+ // Phase 1: Refresh mirror tracking/ from source (source → mirror)
+ // In source: fetch origin, checkout A, reset --hard origin/A; then push to mirror
+ try {
+ this.execCommand('git fetch origin --prune --tags', sourceRepoPath);
+ this.execCommand(`git checkout ${defaultBranch}`, sourceRepoPath);
+ this.execCommand(`git reset --hard origin/${defaultBranch}`, sourceRepoPath);
+ } catch {
+ // Origin may not exist or branch may not be tracked, continue
+ }
+
+ // Push source default branch to mirror using namespaced ref
+ await this.mirrorService.pushSourceToMirror(
+ sourceRepoPath,
+ mirrorRepoPath,
+ defaultBranch,
+ projectId
+ );
+
+ // Phase 2: Rebase/merge latest A into relay integration & resolve conflicts
+ // Fetch latest tracking A from mirror
+ const namespacedTrackingRef = `refs/heads/gv/${projectId}/tracking/${defaultBranch}`;
+ const remoteTrackingRef = `refs/remotes/mirror/gv/${projectId}/tracking/${defaultBranch}`;
+
+ const normalizedMirrorPath = path.resolve(mirrorRepoPath).replace(/\\/g, '/');
+ const mirrorUrl =
+ process.platform === 'win32' ? normalizedMirrorPath : `file://${normalizedMirrorPath}`;
+
+ try {
+ this.execCommand(`git remote add mirror "${mirrorUrl}"`, relayRepoPath);
+ } catch {
+ this.execCommand(`git remote set-url mirror "${mirrorUrl}"`, relayRepoPath);
+ }
+
+ // Explicit fetch (no pull)
+ this.execCommand(
+ `git fetch mirror ${namespacedTrackingRef}:${remoteTrackingRef}`,
+ relayRepoPath
+ );
+
+ // Update local default branch to match mirror tracking
+ this.execCommand(`git checkout ${defaultBranch}`, relayRepoPath);
+ this.execCommand(`git reset --hard ${remoteTrackingRef}`, relayRepoPath);
+
+ // Merge default branch into relay
+ this.execCommand('git checkout relay', relayRepoPath);
+ try {
+ this.execCommand(
+ `git merge --no-ff ${defaultBranch} -m "Merge ${defaultBranch} into relay"`,
+ relayRepoPath
+ );
+ } catch {
+ // Merge conflict - commit if needed
+ const status = this.execCommand('git status --porcelain', relayRepoPath).trim();
+ if (status.length > 0) {
+ this.execCommand('git add -A', relayRepoPath);
+ this.execCommand('git commit -m "Resolve merge conflicts"', relayRepoPath);
+ }
+ }
+
+ // Push updated relay to mirror
+ await this.mirrorService.pushRelayToMirror(relayRepoPath, mirrorRepoPath, 'relay', projectId);
+
+ // Phase 3: Apply relay integration to source A
+ // Preflight: source working tree must be clean (design: no reset --hard on source)
+ const sourceStatus = this.execCommand('git status --porcelain', sourceRepoPath).trim();
+ if (sourceStatus.length > 0) {
+ throw new Error(
+ `Source repo has uncommitted changes. Please commit or stash before syncing. Output: ${sourceStatus.slice(0, 200)}`
+ );
+ }
+
+ // Fetch relay from mirror
+ const namespacedRelayRef = `refs/heads/gv/${projectId}/relay`;
+ const remoteRelayRef = `refs/remotes/mirror/gv/${projectId}/relay`;
+
+ try {
+ this.execCommand(`git remote add mirror "${mirrorUrl}"`, sourceRepoPath);
+ } catch {
+ this.execCommand(`git remote set-url mirror "${mirrorUrl}"`, sourceRepoPath);
+ }
+
+ // Explicit fetch (no pull)
+ this.execCommand(`git fetch mirror ${namespacedRelayRef}:${remoteRelayRef}`, sourceRepoPath);
+
+ // Ensure on default branch
+ this.execCommand(`git checkout ${defaultBranch}`, sourceRepoPath);
+
+ // Merge relay into default (no reset --hard, preserve working directory)
+ try {
+ this.execCommand(
+ `git merge --no-ff ${remoteRelayRef} -m "Merge relay into ${defaultBranch}"`,
+ sourceRepoPath
+ );
+ } catch {
+ // Merge conflict - this is expected in some cases
+ throw new Error(
+ `Merge conflict when merging relay into ${defaultBranch}. Please resolve conflicts manually.`
+ );
+ }
+
+ // Phase 4: Push updated source A to origin (if exists) and mirror
+ try {
+ this.execCommand(`git push origin ${defaultBranch}`, sourceRepoPath);
+ } catch {
+ // Origin may not exist, continue
+ }
+ await this.mirrorService.pushSourceToMirror(
+ sourceRepoPath,
+ mirrorRepoPath,
+ defaultBranch,
+ projectId
+ );
+
+ // Phase 5: Sync relay default branch to mirror tracking (design Phase 5)
+ await this.mirrorService.fetchMirrorRefToRelay(
+ mirrorRepoPath,
+ relayRepoPath,
+ defaultBranch,
+ projectId,
+ 'tracking'
+ );
+
+ // Return the commit SHA of the default branch in source repo
+ const commitSha = this.execCommand('git rev-parse HEAD', sourceRepoPath).trim();
+ return commitSha;
+ }
+}
diff --git a/backend/src/services/GitService.ts b/backend/src/services/git/GitService.ts
similarity index 85%
rename from backend/src/services/GitService.ts
rename to backend/src/services/git/GitService.ts
index 7253601..59c2827 100644
--- a/backend/src/services/GitService.ts
+++ b/backend/src/services/git/GitService.ts
@@ -1,12 +1,11 @@
import { execSync } from 'node:child_process';
import fs from 'node:fs/promises';
+import path from 'node:path';
import { GitWorktreeService } from './GitWorktreeService.js';
import { GitCommitService } from './GitCommitService.js';
import { GitFileService, type RepoFile } from './GitFileService.js';
import { GitRelayService } from './GitRelayService.js';
-
-// Re-export RepoFile interface for backward compatibility
-export type { RepoFile };
+import { STORAGE_CONFIG } from '../../config/storage.js';
/**
* Main Git service that provides a unified interface to all Git operations
@@ -25,7 +24,8 @@ export class GitService {
this.fileService = new GitFileService();
this.relayService = new GitRelayService(
this.execCommand.bind(this),
- this.getDefaultBranch.bind(this)
+ this.getDefaultBranch.bind(this),
+ STORAGE_CONFIG.mirrorsDir
);
}
@@ -86,6 +86,15 @@ export class GitService {
}
}
+ getRemoteUrl(repoPath: string, remote: string = 'origin'): string | null {
+ try {
+ const output = this.execCommand(`git remote get-url ${remote}`, repoPath).trim();
+ return output || null;
+ } catch {
+ return null;
+ }
+ }
+
getHeadSha(repoPath: string): string {
return this.execCommand('git rev-parse HEAD', repoPath).trim();
}
@@ -312,7 +321,7 @@ export class GitService {
if (worktreePath) {
const normalizedWorktreePath = normalizePath(worktreePath);
- // If we're already in the worktree that has this branch, checkout normally
+ // If we're already in a worktree that has this branch, checkout normally
if (normalizedWorktreePath === normalizedRepoPath) {
this.execCommand(`git checkout ${branch}`, repoPath);
return;
@@ -377,20 +386,63 @@ export class GitService {
// Relay Repository Operations (delegated to GitRelayService)
// ============================================================================
+ /**
+ * Get the mirror repo path for a given source repo path
+ * Multiple projects with the same source path share the same mirror repo
+ * This duplicates the logic from GitMirrorService.getMirrorRepoPath to avoid circular dependencies
+ */
+ getMirrorRepoPath(sourceRepoPath: string): string {
+ // Normalize the source path to handle different path formats
+ const normalizedPath = path.resolve(sourceRepoPath).replace(/\\/g, '/');
+
+ // Create a hash from the normalized path
+ let hash = 0;
+ for (let i = 0; i < normalizedPath.length; i++) {
+ const char = normalizedPath.charCodeAt(i);
+ hash = (hash << 5) - hash + char;
+ hash = hash & hash; // Convert to 32-bit integer
+ }
+
+ // Use absolute value and convert to hex for filename-safe string
+ const hashStr = Math.abs(hash).toString(16).padStart(8, '0');
+
+ // Create a safe directory name from the last part of the path
+ const pathParts = normalizedPath.split('/').filter((p) => p.length > 0);
+ const lastPart = pathParts[pathParts.length - 1] || 'repo';
+ const safeName = lastPart.replace(/[^a-zA-Z0-9._-]/g, '_');
+
+ // Combine hash and safe name for uniqueness and readability
+ return path.join(STORAGE_CONFIG.mirrorsDir, `${safeName}-${hashStr}.git`);
+ }
+
async createRelayRepo(
sourceRepoPath: string,
relayRepoPath: string,
+ mirrorRepoPath: string,
+ projectId: string,
branch?: string
): Promise {
- return this.relayService.createRelayRepo(sourceRepoPath, relayRepoPath, branch);
+ return this.relayService.createRelayRepo(
+ sourceRepoPath,
+ relayRepoPath,
+ mirrorRepoPath,
+ projectId,
+ branch
+ );
}
async syncRelayToSource(
relayRepoPath: string,
sourceRepoPath: string,
- projectName: string
+ mirrorRepoPath: string,
+ projectId: string
): Promise {
- return this.relayService.syncRelayToSource(relayRepoPath, sourceRepoPath, projectName);
+ return this.relayService.syncRelayToSource(
+ relayRepoPath,
+ sourceRepoPath,
+ mirrorRepoPath,
+ projectId
+ );
}
}
diff --git a/backend/src/services/GitWorktreeService.ts b/backend/src/services/git/GitWorktreeService.ts
similarity index 98%
rename from backend/src/services/GitWorktreeService.ts
rename to backend/src/services/git/GitWorktreeService.ts
index 47be860..9d319f4 100644
--- a/backend/src/services/GitWorktreeService.ts
+++ b/backend/src/services/git/GitWorktreeService.ts
@@ -1,5 +1,3 @@
-import { execSync } from 'node:child_process';
-
/**
* Service for managing Git worktrees
*/
diff --git a/backend/src/services/git/index.ts b/backend/src/services/git/index.ts
new file mode 100644
index 0000000..64ebddc
--- /dev/null
+++ b/backend/src/services/git/index.ts
@@ -0,0 +1,6 @@
+export { GitCommitService } from './GitCommitService.js';
+export { GitFileService, type RepoFile } from './GitFileService.js';
+export { GitMirrorService } from './GitMirrorService.js';
+export { GitRelayService } from './GitRelayService.js';
+export { GitService, gitService } from './GitService.js';
+export { GitWorktreeService } from './GitWorktreeService.js';
diff --git a/backend/src/services/workflow/WorkflowEventBus.ts b/backend/src/services/workflow/WorkflowEventBus.ts
new file mode 100644
index 0000000..b7a1e3f
--- /dev/null
+++ b/backend/src/services/workflow/WorkflowEventBus.ts
@@ -0,0 +1,252 @@
+/**
+ * WorkflowEventBus - Central event bus for workflow events
+ *
+ * Updated to use uniform event envelope format per optimized design:
+ * - eventId, type, at, subject, resourceVersion, causedBy, data
+ */
+
+import type { WorkflowEvent, EventSubject, EventCausedBy } from 'git-vibe-shared';
+import { v4 as uuidv4 } from 'uuid';
+
+// ============================================================================
+// Event Type Definitions
+// ============================================================================
+
+export type WorkItemEventType =
+ | 'workitem.created'
+ | 'workitem.updated'
+ | 'workitem.status.changed'
+ | 'workitem.closed'
+ | 'workitem.workspace.ready'
+ | 'workitem.task.start'
+ | 'workitem.task.resume'
+ | 'workitem.restarted';
+
+export type WorkflowNodeEventType =
+ | 'node.started'
+ | 'node.completed'
+ | 'agent.started'
+ | 'agent.completed'
+ | 'pr.created'
+ | 'pr.merged'
+ | 'git.committed'
+ | 'conflict.detected'
+ | 'workspace.initialized'
+ | 'workspace.ready'
+ | 'ci.checks.completed'
+ | 'command_run.completed'
+ | 'command_run.started'
+ | 'workflow.anchor.reached'
+ | 'task.resumeRequested'
+ | 'worktree.updated'
+ | 'workitem.merged';
+
+export type ExternalEventType =
+ | 'github.pr.created'
+ | 'github.pr.updated'
+ | 'github.pr.merged'
+ | 'ci.checks.updated'
+ | 'git.state.changed';
+
+export type DomainEventType =
+ | 'task.created'
+ | 'task.completed'
+ | 'task.started'
+ | 'task.resumeRequested'
+ | 'pr_request.created'
+ | 'pr_request.updated'
+ | 'pr_request.started'
+ | 'pr_request.mergeAttempted'
+ | 'pr_request.merged';
+
+export type WorkflowEventType =
+ | WorkItemEventType
+ | WorkflowNodeEventType
+ | ExternalEventType
+ | DomainEventType;
+
+export type ResourceEventType = WorkflowEventType;
+
+// ============================================================================
+// Event Payload Types
+// ============================================================================
+
+export interface WorkItemCreatedPayload {
+ projectId: string;
+ type: 'issue' | 'feature-request';
+ title: string;
+ body?: string;
+}
+
+export interface WorkItemUpdatedPayload {
+ title: string;
+ body: string;
+}
+
+export interface WorkItemStatusChangedPayload {
+ oldStatus: 'open' | 'closed';
+ newStatus: 'open' | 'closed';
+}
+
+export interface WorkItemWorkspaceReadyPayload {
+ worktreePath: string;
+ headBranch: string;
+}
+
+export interface WorkItemTaskStartPayload {
+ title: string;
+ body: string;
+ userMessage?: string;
+}
+
+export interface WorkItemTaskResumePayload {
+ originalAgentRunId: string;
+ sessionId: string;
+ prompt: string;
+ title: string;
+ body: string;
+}
+
+export interface WorkItemRestartedPayload {
+ originalAgentRunId: string;
+ title: string;
+ body: string;
+}
+
+// ============================================================================
+// Event Handler Type
+// ============================================================================
+
+export type EventHandler = (event: WorkflowEvent) => Promise | void;
+
+// ============================================================================
+// WorkflowEventBus
+// ============================================================================
+
+/**
+ * WorkflowEventBus - Central event bus for workflow events
+ * Uses uniform event envelope format per optimized design
+ */
+export class WorkflowEventBus {
+ private handlers: Map> = new Map();
+ private anyHandlers: Set = new Set();
+
+ /**
+ * Register an event handler
+ */
+ on(eventType: WorkflowEventType, handler: EventHandler): () => void {
+ if (!this.handlers.has(eventType)) {
+ this.handlers.set(eventType, new Set());
+ }
+ this.handlers.get(eventType)!.add(handler);
+
+ return () => {
+ this.handlers.get(eventType)?.delete(handler);
+ };
+ }
+
+ /**
+ * Register a handler for ALL events (best practice for event-driven workflow engines).
+ */
+ onAny(handler: EventHandler): () => void {
+ this.anyHandlers.add(handler);
+ return () => {
+ this.anyHandlers.delete(handler);
+ };
+ }
+
+ /**
+ * Emit an event using uniform event envelope format
+ */
+ async emit(event: WorkflowEvent): Promise {
+ const handlers = this.handlers.get(event.type);
+ const typedCount = handlers?.size ?? 0;
+ const anyCount = this.anyHandlers.size;
+ const hasHandlers = typedCount + anyCount > 0;
+
+ if (!hasHandlers) {
+ console.warn(`[WorkflowEventBus] No handlers registered for event type: ${event.type}`);
+ return;
+ }
+
+ console.log(
+ `[WorkflowEventBus] Emitting event ${event.type} (${event.eventId}) to ${typedCount + anyCount} handler(s)`
+ );
+
+ const allHandlers = [
+ ...(handlers ? Array.from(handlers) : []),
+ ...Array.from(this.anyHandlers),
+ ];
+
+ const promises = allHandlers.map((handler) => {
+ try {
+ return Promise.resolve(handler(event));
+ } catch (error) {
+ console.error(`Error in event handler for ${event.type}:`, error);
+ return Promise.resolve();
+ }
+ });
+
+ await Promise.all(promises);
+ console.log(`[WorkflowEventBus] Completed emitting event ${event.type} (${event.eventId})`);
+ }
+
+ /**
+ * Create a uniform event envelope
+ */
+ createEvent(
+ type: WorkflowEventType,
+ subject: EventSubject,
+ data: Record,
+ options?: {
+ resourceVersion?: number;
+ causedBy?: EventCausedBy;
+ }
+ ): WorkflowEvent {
+ return {
+ eventId: uuidv4(),
+ type,
+ at: new Date().toISOString(),
+ subject,
+ resourceVersion: options?.resourceVersion,
+ causedBy: options?.causedBy,
+ data,
+ };
+ }
+
+ /**
+ * Emit workitem events by type and workItemId
+ */
+ async emitWorkItemEvent(
+ type: WorkItemEventType,
+ workItemId: string,
+ data: Record,
+ options?: {
+ resourceVersion?: number;
+ causedBy?: EventCausedBy;
+ }
+ ): Promise {
+ const event = this.createEvent(type, { kind: 'workitem', id: workItemId }, data, options);
+ await this.emit(event);
+ }
+
+ /**
+ * Remove all listeners for an event type (or all events)
+ */
+ removeAllListeners(eventType?: string): void {
+ if (eventType) {
+ this.handlers.delete(eventType);
+ } else {
+ this.handlers.clear();
+ }
+ }
+
+ /**
+ * Get listener count for an event type
+ */
+ listenerCount(eventType: string): number {
+ return this.handlers.get(eventType)?.size ?? 0;
+ }
+}
+
+export const workflowEventBus = new WorkflowEventBus();
diff --git a/backend/src/services/workflow/WorkflowExecutionService.test.ts b/backend/src/services/workflow/WorkflowExecutionService.test.ts
new file mode 100644
index 0000000..be1eeb7
--- /dev/null
+++ b/backend/src/services/workflow/WorkflowExecutionService.test.ts
@@ -0,0 +1,594 @@
+/**
+ * Tests for WorkflowExecutionService - Workflow-driven execution
+ * Verifies that work item creation triggers workflow execution and all node executors work correctly
+ */
+
+import { describe, it, expect, beforeAll, beforeEach } from 'vitest';
+import { runMigrations } from '../../db/migrations.js';
+import { projectsRepository } from '../../repositories/ProjectsRepository.js';
+import { workItemsRepository } from '../../repositories/WorkItemsRepository.js';
+import { workflowsRepository } from '../../repositories/WorkflowsRepository.js';
+import { workItemEventService } from './../WorkItemEventService.js';
+import { workflowExecutionService } from './WorkflowExecutionService.js';
+import { v4 as uuidv4 } from 'uuid';
+import type { Workflow } from 'git-vibe-shared';
+import type { NodeRunRecord, WorkflowRunRecord } from '../../repositories/WorkflowsRepository.js';
+
+describe('WorkflowExecutionService - Workflow-driven execution', () => {
+ let testProjectId: string;
+
+ beforeAll(async () => {
+ await runMigrations();
+ });
+
+ beforeEach(async () => {
+ // Create a test project for each test
+ const project = await projectsRepository.create({
+ id: uuidv4(),
+ name: `test-workflow-project-${Date.now()}`,
+ sourceRepoPath: '/tmp/test/source',
+ mirrorRepoPath: '/tmp/test/mirror.git',
+ relayRepoPath: '/tmp/test/relay',
+ defaultBranch: 'main',
+ });
+ testProjectId = project.id;
+ });
+
+ describe('Work item creation triggers workflow', () => {
+ it('should trigger workflow execution when work item is created', async () => {
+ // Create work item via event service (which emits workitem.created event)
+ const workItem = await workItemEventService.createWorkItem({
+ id: uuidv4(),
+ projectId: testProjectId,
+ type: 'issue',
+ title: 'Test issue',
+ body: 'Test description',
+ });
+
+ // Wait for outbox processor + async workflow execution
+ await new Promise((resolve) => setTimeout(resolve, 1200));
+
+ // Check that a workflow run was created
+ const runs = await workflowsRepository.findAllRuns(workItem.id);
+ expect(runs.length).toBeGreaterThan(0);
+
+ // Verify the run is associated with the work item
+ const run = runs[0];
+ expect(run).toBeDefined();
+ expect(run?.workItemId).toBe(workItem.id);
+ // Default workflow is versioned and project-scoped (e.g. workitem-default-v12-)
+ expect(run?.workflowId).toContain(`-${testProjectId}`);
+ expect(run?.workflowId).toContain('workitem-default-v');
+ });
+
+ it('should find and execute event node for workitem.created', async () => {
+ const workItem = await workItemEventService.createWorkItem({
+ id: uuidv4(),
+ projectId: testProjectId,
+ type: 'issue',
+ title: 'Test issue',
+ body: 'Test description',
+ });
+
+ // Wait for workflow execution
+ await new Promise((resolve) => setTimeout(resolve, 1200));
+
+ // Check that step executions were created
+ const runs = await workflowsRepository.findAllRuns(workItem.id);
+ expect(runs.length).toBeGreaterThan(0);
+
+ const run = runs[0]!;
+ const steps = await workflowsRepository.findNodeRunsByWorkflowRunId(run.id);
+
+ // Should have at least the event node executed
+ expect(steps.length).toBeGreaterThan(0);
+
+ // Find the event node step
+ const eventStep = steps.find((s: NodeRunRecord) => s.nodeId === 'ev_workitem_created');
+ expect(eventStep).toBeDefined();
+ });
+ });
+
+ describe('Node Executor Tests', () => {
+ describe('EventNodeExecutor', () => {
+ it('should execute event node successfully', async () => {
+ const workItem = await workItemEventService.createWorkItem({
+ id: uuidv4(),
+ projectId: testProjectId,
+ type: 'issue',
+ title: 'Test event node',
+ body: 'Test',
+ });
+
+ // Wait for outbox + callback-based completion processing
+ await new Promise((resolve) => setTimeout(resolve, 2000));
+
+ const runs = await workflowsRepository.findAllRuns(workItem.id);
+ const run = runs[0]!;
+ const steps = await workflowsRepository.findNodeRunsByWorkflowRunId(run.id);
+ const eventStep = steps.find((s: NodeRunRecord) => s.nodeId === 'ev_workitem_created');
+
+ expect(eventStep).toBeDefined();
+ // The event node should eventually complete; depending on timing, it may still be "running"
+ // when the assertion runs. Accept either state to avoid test flakiness.
+ expect(['running', 'succeeded']).toContain(eventStep?.status);
+ });
+ });
+
+ describe('WorkspaceNodeExecutor', () => {
+ it('should execute workspace initialization node', async () => {
+ // Create a workflow with workspace init node
+ const workflowId = `test-workspace-workflow-${Date.now()}`;
+ const testWorkflow: Workflow = {
+ version: 1,
+ workflow: {
+ id: workflowId,
+ name: `Test Workspace Workflow ${Date.now()}`,
+ description: 'Test',
+ backbone: {
+ nodes: [
+ {
+ id: 'workspace_init',
+ display: { name: 'Initialize workspace' },
+ subject: { kind: 'workitem', idRef: 'ctx.event.subject.id' },
+ listens: [{ on: 'workitem.created' }],
+ trigger: {
+ when: 'true',
+ call: {
+ resourceType: 'Worktree',
+ idempotencyKey: 'workitem:{ctx.event.subject.id}:worktree:init',
+ input: { ensureWorktree: true },
+ },
+ },
+ onResult: [
+ {
+ when: 'true',
+ patch: {},
+ emit: [],
+ },
+ ],
+ },
+ ],
+ slots: [],
+ },
+ extensions: { nodes: [] },
+ executors: { registry: {} },
+ policies: {},
+ },
+ };
+
+ const workflow = await workflowsRepository.create({
+ id: workflowId,
+ projectId: testProjectId,
+ name: `Test Workspace Workflow ${Date.now()}`,
+ definition: testWorkflow,
+ isDefault: false,
+ });
+
+ const workItem = await workItemsRepository.create({
+ id: uuidv4(),
+ projectId: testProjectId,
+ type: 'issue',
+ title: 'Test workspace',
+ body: 'Test',
+ });
+
+ // Execute workflow manually
+ // Note: This may fail if workspace service dependencies aren't available
+ // (e.g., git repository doesn't exist at the test path)
+ try {
+ await workflowExecutionService.execute(workflow.id, workItem.id);
+
+ // Wait for execution
+ await new Promise((resolve) => setTimeout(resolve, 200));
+
+ const runs = await workflowsRepository.findAllRuns(workItem.id);
+ const run = runs.find((r: WorkflowRunRecord) => r.workflowId === workflow.id);
+ expect(run).toBeDefined();
+
+ if (run) {
+ const steps = await workflowsRepository.findNodeRunsByWorkflowRunId(run.id);
+ const workspaceStep = steps.find((s: NodeRunRecord) => s.nodeId === 'workspace_init');
+
+ expect(workspaceStep).toBeDefined();
+
+ // Workspace step may fail if git repo doesn't exist at test path
+ // But we verify the executor structure is correct
+ if (workspaceStep?.status === 'succeeded') {
+ // Verify output contains workspace information if step succeeded
+ if (workspaceStep?.output) {
+ const outputs =
+ typeof workspaceStep.output === 'string'
+ ? JSON.parse(workspaceStep.output)
+ : workspaceStep.output;
+ expect(outputs).toHaveProperty('workspace.worktreePath');
+ expect(outputs).toHaveProperty('workspace.headBranch');
+ }
+ } else {
+ // Step failed, but executor structure is correct
+ expect(workspaceStep?.status).toBe('failed');
+ }
+ }
+ } catch (error) {
+ // Expected if workspace service dependencies aren't available
+ // But we verify the executor can handle the node type
+ expect(error).toBeDefined();
+ }
+ });
+ });
+
+ describe('AgentNodeExecutor', () => {
+ it('should handle agent node execution (without actually running agent)', async () => {
+ // This test verifies the executor can handle agent nodes
+ // We'll mock the agent service to avoid actual agent execution
+
+ const workflowId = `test-agent-workflow-${Date.now()}`;
+ const testWorkflow: Workflow = {
+ version: 1,
+ workflow: {
+ id: workflowId,
+ name: `Test Agent Workflow ${Date.now()}`,
+ description: 'Test',
+ backbone: {
+ nodes: [
+ {
+ id: 'agent_process',
+ display: { name: 'Process work item' },
+ subject: { kind: 'task', idRef: 'ctx.event.subject.id' },
+ listens: [{ on: 'task.created' }],
+ trigger: {
+ when: 'true',
+ call: {
+ resourceType: 'AgentRun',
+ input: {
+ session: { mode: 'new', export: true },
+ template: 'Test prompt',
+ },
+ },
+ },
+ onResult: [
+ {
+ when: 'true',
+ patch: {},
+ emit: [],
+ },
+ ],
+ },
+ ],
+ slots: [],
+ },
+ extensions: { nodes: [] },
+ executors: { registry: {} },
+ policies: {},
+ },
+ };
+
+ const workflow = await workflowsRepository.create({
+ id: workflowId,
+ projectId: testProjectId,
+ name: `Test Agent Workflow ${Date.now()}`,
+ definition: testWorkflow,
+ isDefault: false,
+ });
+
+ const workItem = await workItemsRepository.create({
+ id: uuidv4(),
+ projectId: testProjectId,
+ type: 'issue',
+ title: 'Test agent',
+ body: 'Test',
+ });
+
+ // Note: This will fail if agent service isn't properly mocked
+ // But it verifies the executor structure is correct
+ try {
+ await workflowExecutionService.execute(workflow.id, workItem.id);
+
+ // Wait for execution
+ await new Promise((resolve) => setTimeout(resolve, 500));
+
+ const runs = await workflowsRepository.findAllRuns(workItem.id);
+ const run = runs.find((r: WorkflowRunRecord) => r.workflowId === workflow.id);
+
+ if (run) {
+ const steps = await workflowsRepository.findNodeRunsByWorkflowRunId(run.id);
+ const agentStep = steps.find((s: NodeRunRecord) => s.nodeId === 'agent_process');
+
+ // Step should exist (may be failed if agent service not available, but structure is correct)
+ expect(agentStep).toBeDefined();
+ }
+ } catch (error) {
+ // Expected if agent service isn't available in test environment
+ // But we verify the executor can handle the node type
+ expect(error).toBeDefined();
+ }
+ });
+ });
+
+ describe('PRNodeExecutor', () => {
+ it('should handle PR create node', async () => {
+ const workflowId = `test-pr-workflow-${Date.now()}`;
+ const testWorkflow: Workflow = {
+ version: 1,
+ workflow: {
+ id: workflowId,
+ name: `Test PR Workflow ${Date.now()}`,
+ description: 'Test',
+ backbone: {
+ nodes: [
+ {
+ id: 'create_pr',
+ display: { name: 'Create PR' },
+ subject: { kind: 'workitem', idRef: 'ctx.event.subject.id' },
+ listens: [{ on: 'workitem.updated' }],
+ trigger: {
+ when: 'true',
+ call: {
+ resourceType: 'PullRequest',
+ idempotencyKey: 'workitem:{ctx.event.subject.id}:pr:create',
+ input: {
+ base: 'main',
+ head: 'current_branch',
+ },
+ },
+ },
+ onResult: [
+ {
+ when: 'true',
+ patch: {},
+ emit: [],
+ },
+ ],
+ },
+ ],
+ slots: [],
+ },
+ extensions: { nodes: [] },
+ executors: { registry: {} },
+ policies: {},
+ },
+ };
+
+ const workflow = await workflowsRepository.create({
+ id: workflowId,
+ projectId: testProjectId,
+ name: `Test PR Workflow ${Date.now()}`,
+ definition: testWorkflow,
+ isDefault: false,
+ });
+
+ // Create work item with workspace initialized
+ const workItem = await workItemsRepository.create({
+ id: uuidv4(),
+ projectId: testProjectId,
+ type: 'issue',
+ title: 'Test PR',
+ body: 'Test',
+ worktreePath: '/tmp/test/worktree',
+ headBranch: 'feature/test',
+ baseBranch: 'main',
+ workspaceStatus: 'ready',
+ });
+
+ // Note: This will fail if PR service dependencies aren't available
+ // But it verifies the executor structure is correct
+ try {
+ await workflowExecutionService.execute(workflow.id, workItem.id);
+
+ await new Promise((resolve) => setTimeout(resolve, 200));
+
+ const runs = await workflowsRepository.findAllRuns(workItem.id);
+ const run = runs.find((r: WorkflowRunRecord) => r.workflowId === workflow.id);
+
+ if (run) {
+ const steps = await workflowsRepository.findNodeRunsByWorkflowRunId(run.id);
+ const prStep = steps.find((s: NodeRunRecord) => s.nodeId === 'create_pr');
+
+ // Step should exist
+ expect(prStep).toBeDefined();
+ }
+ } catch (error) {
+ // Expected if PR service dependencies aren't available
+ expect(error).toBeDefined();
+ }
+ });
+ });
+
+ describe('GitNodeExecutor', () => {
+ it('should handle git commit node', async () => {
+ const workflowId = `test-git-workflow-${Date.now()}`;
+ const testWorkflow: Workflow = {
+ version: 1,
+ workflow: {
+ id: workflowId,
+ name: `Test Git Workflow ${Date.now()}`,
+ description: 'Test',
+ backbone: {
+ nodes: [
+ {
+ id: 'git_commit',
+ display: { name: 'Commit changes' },
+ subject: { kind: 'workitem', idRef: 'ctx.event.subject.id' },
+ listens: [{ on: 'workitem.updated' }],
+ trigger: {
+ when: 'true',
+ call: {
+ resourceType: 'GitOps',
+ idempotencyKey: 'workitem:{ctx.event.subject.id}:git:commit',
+ input: {
+ message: 'Test commit',
+ },
+ },
+ },
+ onResult: [
+ {
+ when: 'true',
+ patch: {},
+ emit: [],
+ },
+ ],
+ },
+ ],
+ slots: [],
+ },
+ extensions: { nodes: [] },
+ executors: { registry: {} },
+ policies: {},
+ },
+ };
+
+ const workflow = await workflowsRepository.create({
+ id: workflowId,
+ projectId: testProjectId,
+ name: `Test Git Workflow ${Date.now()}`,
+ definition: testWorkflow,
+ isDefault: false,
+ });
+
+ const workItem = await workItemsRepository.create({
+ id: uuidv4(),
+ projectId: testProjectId,
+ type: 'issue',
+ title: 'Test git',
+ body: 'Test',
+ worktreePath: '/tmp/test/worktree',
+ workspaceStatus: 'ready',
+ });
+
+ // Note: This will fail if git service dependencies aren't available
+ try {
+ await workflowExecutionService.execute(workflow.id, workItem.id);
+
+ await new Promise((resolve) => setTimeout(resolve, 200));
+
+ const runs = await workflowsRepository.findAllRuns(workItem.id);
+ const run = runs.find((r: WorkflowRunRecord) => r.workflowId === workflow.id);
+
+ if (run) {
+ const steps = await workflowsRepository.findNodeRunsByWorkflowRunId(run.id);
+ const gitStep = steps.find((s: NodeRunRecord) => s.nodeId === 'git_commit');
+
+ // Step should exist
+ expect(gitStep).toBeDefined();
+ }
+ } catch (error) {
+ // Expected if git service dependencies aren't available
+ expect(error).toBeDefined();
+ }
+ });
+ });
+
+ describe('CINodeExecutor', () => {
+ it('should handle CI run node', async () => {
+ const workflowId = `test-ci-workflow-${Date.now()}`;
+ const testWorkflow: Workflow = {
+ version: 1,
+ workflow: {
+ id: workflowId,
+ name: `Test CI Workflow ${Date.now()}`,
+ description: 'Test',
+ backbone: {
+ nodes: [
+ {
+ id: 'ci_run',
+ display: { name: 'Run CI checks' },
+ subject: { kind: 'workitem', idRef: 'ctx.event.subject.id' },
+ listens: [{ on: 'workitem.updated' }],
+ trigger: {
+ when: 'true',
+ call: {
+ resourceType: 'CommandExec',
+ idempotencyKey: 'workitem:{ctx.event.subject.id}:ci:run',
+ input: {
+ checks: ['lint'],
+ },
+ },
+ },
+ onResult: [
+ {
+ when: 'true',
+ patch: {},
+ emit: [],
+ },
+ ],
+ },
+ ],
+ slots: [],
+ },
+ extensions: { nodes: [] },
+ executors: { registry: {} },
+ policies: {},
+ },
+ };
+
+ const workflow = await workflowsRepository.create({
+ id: workflowId,
+ projectId: testProjectId,
+ name: `Test CI Workflow ${Date.now()}`,
+ definition: testWorkflow,
+ isDefault: false,
+ });
+
+ const workItem = await workItemsRepository.create({
+ id: uuidv4(),
+ projectId: testProjectId,
+ type: 'issue',
+ title: 'Test CI',
+ body: 'Test',
+ worktreePath: '/tmp/test/worktree',
+ workspaceStatus: 'ready',
+ });
+
+ // Note: This will fail if CI dependencies aren't available
+ try {
+ await workflowExecutionService.execute(workflow.id, workItem.id);
+
+ await new Promise((resolve) => setTimeout(resolve, 200));
+
+ const runs = await workflowsRepository.findAllRuns(workItem.id);
+ const run = runs.find((r: WorkflowRunRecord) => r.workflowId === workflow.id);
+
+ if (run) {
+ const steps = await workflowsRepository.findNodeRunsByWorkflowRunId(run.id);
+ const ciStep = steps.find((s: NodeRunRecord) => s.nodeId === 'ci_run');
+
+ // Step should exist
+ expect(ciStep).toBeDefined();
+ }
+ } catch (error) {
+ // Expected if CI dependencies aren't available
+ expect(error).toBeDefined();
+ }
+ });
+ });
+ });
+
+ describe('Workflow execution flow', () => {
+ it('should execute nodes in sequence', async () => {
+ // The service enforces a versioned built-in default workflow (v12) per project.
+ // This test verifies the engine progresses at least the anchor node.
+ const workItem = await workItemEventService.createWorkItem({
+ id: uuidv4(),
+ projectId: testProjectId,
+ type: 'issue',
+ title: 'Test sequence',
+ body: 'Test',
+ });
+
+ // Wait for outbox processor to dispatch workitem.created
+ await new Promise((resolve) => setTimeout(resolve, 1500));
+
+ const runs = await workflowsRepository.findAllRuns(workItem.id);
+ expect(runs.length).toBeGreaterThan(0);
+
+ const run = runs[0]!;
+ const steps = await workflowsRepository.findNodeRunsByWorkflowRunId(run.id);
+
+ // Should have at least the anchor node
+ expect(steps.length).toBeGreaterThan(0);
+
+ const anchor = steps.find((s: NodeRunRecord) => s.nodeId === 'ev_workitem_created');
+ expect(anchor).toBeDefined();
+ });
+ });
+});
diff --git a/backend/src/services/workflow/WorkflowExecutionService.ts b/backend/src/services/workflow/WorkflowExecutionService.ts
new file mode 100644
index 0000000..9f8b312
--- /dev/null
+++ b/backend/src/services/workflow/WorkflowExecutionService.ts
@@ -0,0 +1,1861 @@
+/**
+ * WorkflowExecutionService - Orchestrates workflow execution using NodeSpec model
+ *
+ * Implements the optimized workflow design:
+ * - Event-driven execution based on listen/emit
+ * - NodeSpec with listen, trigger, onResult
+ * - Uniform event envelope format
+ * - Resource versioning and idempotency
+ * - Nodes call Resources via ResourceDispatcher with completion callback
+ * - Resources complete via callback (NOT event bus)
+ * - Only Nodes emit events
+ */
+
+import type {
+ Workflow,
+ NodeSpec,
+ WorkflowEvent,
+ ResourceKind,
+ NodeRunStatus,
+ ResourceType,
+} from 'git-vibe-shared';
+import {
+ WORKFLOW_RUN_STATUS_SUCCEEDED,
+ WORKFLOW_RUN_STATUS_FAILED,
+ WORKFLOW_RUN_STATUS_RUNNING,
+ WORKFLOW_RUN_STATUS_PENDING,
+ NODE_RUN_STATUS_RUNNING,
+ NODE_RUN_STATUS_SUCCEEDED,
+ NODE_RUN_STATUS_FAILED,
+ RESOURCE_STATUS_SUCCEEDED,
+ RESOURCE_STATUS_FAILED,
+} from 'git-vibe-shared';
+import type { NodeRun, WorkflowRun } from '../../types/models.js';
+import { workItemsRepository } from '../../repositories/WorkItemsRepository.js';
+import { workflowsRepository } from '../../repositories/WorkflowsRepository.js';
+import { workflowEventBus, type WorkflowEventType } from './WorkflowEventBus.js';
+import { resourceDispatcher, type ResourceOutcome } from '../ResourceDispatcher.js';
+import { eventOutboxService } from '../EventOutbox.js';
+import { agentRunsRepository } from '../../repositories/AgentRunsRepository.js';
+import { pullRequestsRepository } from '../../repositories/PullRequestsRepository.js';
+import { tasksRepository } from '../../repositories/TasksRepository.js';
+import { getDb } from '../../db/client.js';
+import { nodeRuns, workItems } from '../../models/schema.js';
+import { eq } from 'drizzle-orm';
+import {
+ createDefaultWorkflow,
+ getDefaultWorkflowVersion,
+ getWorkflowVersion,
+} from './defaultWorkflow.js';
+
+/** Evaluation context: event, subject (workitem/task), and related entities. No aggregation of node runs. */
+interface ResourceContext {
+ workitem?: Record;
+ workItem?: Record;
+ task?: Record;
+ pr_request?: Record;
+ worktree?: Record;
+ ci?: Record;
+ event?: WorkflowEvent;
+}
+
+interface EvaluationContext extends ResourceContext {}
+
+export class WorkflowExecutionService {
+ // Simple bounded in-memory de-dup cache to avoid unbounded Set growth.
+ // Persistent de-duplication should be handled at the event outbox consumer layer.
+ private processedEventIds: string[] = [];
+ private readonly MAX_PROCESSED_EVENTS = 10_000;
+
+ // Track completed NodeRun attempts for exactly-once completion guarantee
+ private completedNodeRunAttempts: Set = new Set();
+
+ /**
+ * Evaluate a boolean expression (safe-by-default facade).
+ * IMPORTANT: Do not fall back to unsafe evaluation.
+ */
+ private async evaluateExpression(expr: string, context: EvaluationContext): Promise {
+ return this.evaluateExpressionSafe(expr, context);
+ }
+
+ constructor(
+ private workItemsRepo = workItemsRepository,
+ private workflowsRepo = workflowsRepository
+ ) {
+ // Register event handlers
+ this.setupEventHandlers();
+ console.log('[WorkflowExecutionService] Event handlers registered');
+ }
+
+ /**
+ * Execute a workflow for a workitem (manual trigger)
+ * Creates a workflow run and triggers the initial event
+ */
+ async execute(workflowId: string, workItemId: string): Promise {
+ // Return type is backend's WorkflowRun with Date fields
+ const workItem = await this.workItemsRepo.findById(workItemId);
+ if (!workItem) {
+ throw new Error(`WorkItem ${workItemId} not found`);
+ }
+
+ const workflowRecord = await this.workflowsRepo.findById(workflowId);
+ if (!workflowRecord) {
+ throw new Error(`Workflow ${workflowId} not found`);
+ }
+
+ // Get or create workflow run
+ const existingRuns = await this.workflowsRepo.findAllRuns(workItemId);
+ let workflowRun = existingRuns.find(
+ (r: { workflowId: string; status: string }) =>
+ r.workflowId === workflowId &&
+ r.status !== WORKFLOW_RUN_STATUS_SUCCEEDED &&
+ r.status !== WORKFLOW_RUN_STATUS_FAILED
+ );
+
+ if (!workflowRun) {
+ const runId = crypto.randomUUID();
+ workflowRun = await this.workflowsRepo.createRun({
+ id: runId,
+ workflowId,
+ workItemId,
+ });
+ }
+
+ // Trigger workflow by emitting workitem.created event to match default workflow entry node
+ // The default workflow's ev_workitem_created node listens to 'workitem.created'
+ const event = workflowEventBus.createEvent(
+ 'workitem.created',
+ { kind: 'workitem', id: workItemId },
+ {
+ title: workItem.title,
+ body: workItem.body,
+ },
+ {
+ resourceVersion: (workItem as any).version || 1,
+ }
+ );
+ await eventOutboxService.addEvent(event);
+
+ return {
+ id: workflowRun.id,
+ workflowId: workflowRun.workflowId,
+ workItemId: workflowRun.workItemId,
+ status: workflowRun.status as NodeRunStatus,
+ currentStepId: workflowRun.currentStepId,
+ startedAt: workflowRun.startedAt ? new Date(workflowRun.startedAt) : null,
+ finishedAt: workflowRun.finishedAt ? new Date(workflowRun.finishedAt) : null,
+ createdAt: workflowRun.createdAt,
+ } as WorkflowRun;
+ }
+
+ /**
+ * Recover interrupted workflow runs on service startup
+ * Finds all runs with status 'running' or 'pending' and resumes them
+ */
+ async recoverInterruptedRuns(): Promise {
+ try {
+ // Find all interrupted runs (running or pending)
+ const allRuns = await this.workflowsRepo.findAllRuns();
+ const interruptedRuns = allRuns.filter(
+ (r: { status: string }) =>
+ r.status === WORKFLOW_RUN_STATUS_RUNNING || r.status === WORKFLOW_RUN_STATUS_PENDING
+ );
+
+ if (interruptedRuns.length === 0) {
+ return;
+ }
+
+ console.log(
+ `[WorkflowExecutionService] Found ${interruptedRuns.length} interrupted workflow runs to recover`
+ );
+
+ for (const run of interruptedRuns) {
+ try {
+ // Mark as failed for now (new format doesn't support resuming the same way)
+ await this.workflowsRepo.updateRun(run.id, {
+ status: WORKFLOW_RUN_STATUS_FAILED,
+ finishedAt: new Date(),
+ });
+ console.log(`[WorkflowExecutionService] Marked interrupted run ${run.id} as failed`);
+ } catch (error) {
+ console.error(
+ `[WorkflowExecutionService] Failed to recover workflow run ${run.id}:`,
+ error
+ );
+ }
+ }
+ } catch (error) {
+ console.error('[WorkflowExecutionService] Error during workflow recovery:', error);
+ }
+ }
+
+ /**
+ * Setup event handlers for workflow events
+ * Uses array-driven approach to reduce duplication
+ */
+ private setupEventHandlers(): void {
+ // Only handle regular node-emitted events
+ // resource.result events no longer exist - resources complete via callback
+ workflowEventBus.onAny(async (event) => {
+ await this.handleEvent(event);
+ });
+ }
+
+ /**
+ * Complete a NodeRun from resource outcome (callback-based completion)
+ * This is called by resources via the completion callback
+ */
+ private async completeNodeRun(nodeRunId: string, outcome: ResourceOutcome): Promise {
+ console.log(
+ `[WorkflowExecutionService] Completing NodeRun ${nodeRunId} with outcome:`,
+ outcome
+ );
+
+ const db = await getDb();
+
+ // Get the NodeRun record
+ const [nodeRunRecord] = await db
+ .select()
+ .from(nodeRuns)
+ .where(eq(nodeRuns.id, nodeRunId))
+ .execute();
+
+ if (!nodeRunRecord) {
+ console.error(`[WorkflowExecutionService] NodeRun ${nodeRunId} not found`);
+ throw new Error(`NodeRun ${nodeRunId} not found`);
+ }
+
+ // Exactly-once completion guarantee
+ const completionKey = `${nodeRunId}:${nodeRunRecord.attempt}`;
+ if (this.completedNodeRunAttempts.has(completionKey)) {
+ console.log(
+ `[WorkflowExecutionService] NodeRun ${nodeRunId} attempt ${nodeRunRecord.attempt} already completed, ignoring duplicate`
+ );
+ return;
+ }
+
+ // Safety check: validate resource type matches what was called
+ if (nodeRunRecord.resourceType !== outcome.resourceType) {
+ console.error(
+ `[WorkflowExecutionService] Resource type mismatch: expected ${nodeRunRecord.resourceType}, got ${outcome.resourceType}`
+ );
+ throw new Error(
+ `Resource type mismatch for NodeRun ${nodeRunId}: expected ${nodeRunRecord.resourceType}, got ${outcome.resourceType}`
+ );
+ }
+
+ // Mark as completed
+ this.completedNodeRunAttempts.add(completionKey);
+
+ // Load workflow and node spec
+ const workflowRun = await this.workflowsRepo.findRunById(nodeRunRecord.workflowRunId);
+ if (!workflowRun) {
+ console.error(
+ `[WorkflowExecutionService] WorkflowRun ${nodeRunRecord.workflowRunId} not found`
+ );
+ throw new Error(`WorkflowRun ${nodeRunRecord.workflowRunId} not found`);
+ }
+
+ const workflowRecord = await this.workflowsRepo.findById(workflowRun.workflowId);
+ if (!workflowRecord) {
+ console.error(`[WorkflowExecutionService] Workflow ${workflowRun.workflowId} not found`);
+ throw new Error(`Workflow ${workflowRun.workflowId} not found`);
+ }
+
+ const workflow: Workflow =
+ typeof workflowRecord.definition === 'string'
+ ? JSON.parse(workflowRecord.definition)
+ : (workflowRecord.definition as Workflow);
+
+ const allNodes = this.getAllNodes(workflow);
+ const nodeSpec = allNodes.find((n) => n.id === nodeRunRecord.nodeId);
+ if (!nodeSpec) {
+ console.error(`[WorkflowExecutionService] NodeSpec ${nodeRunRecord.nodeId} not found`);
+ throw new Error(`NodeSpec ${nodeRunRecord.nodeId} not found`);
+ }
+
+ // Build evaluation context with outcome
+ const workItemId = await this.resolveWorkItemId({
+ kind: nodeRunRecord.subjectKind as ResourceKind,
+ id: nodeRunRecord.subjectId,
+ });
+ if (!workItemId) {
+ console.error(
+ `[WorkflowExecutionService] Could not resolve workItemId for NodeRun ${nodeRunId}`
+ );
+ return;
+ }
+
+ const workItem = await this.workItemsRepo.findById(workItemId);
+ if (!workItem) {
+ console.error(`[WorkflowExecutionService] WorkItem ${workItemId} not found`);
+ return;
+ }
+
+ // Create a synthetic event for context building (resource outcome as event)
+ const syntheticEvent: WorkflowEvent = {
+ eventId: crypto.randomUUID(),
+ type: 'node.completed', // Internal event type for completion
+ at: new Date().toISOString(),
+ subject: {
+ kind: nodeRunRecord.subjectKind as ResourceKind,
+ id: nodeRunRecord.subjectId,
+ },
+ causedBy: {
+ workflowRunId: nodeRunRecord.workflowRunId,
+ nodeId: nodeRunRecord.nodeId,
+ nodeRunId: nodeRunId,
+ attempt: nodeRunRecord.attempt,
+ },
+ data: {
+ resourceType: outcome.resourceType,
+ resourceId: outcome.resourceId,
+ status: outcome.status,
+ summary: outcome.summary,
+ outputs: outcome.outputs,
+ },
+ };
+
+ const context = await this.buildEvaluationContext(
+ workflow,
+ nodeRunRecord.workflowRunId,
+ workItemId,
+ syntheticEvent
+ );
+
+ // Add ctx.outcome for onResult evaluation
+ (context as any).outcome = outcome;
+
+ // Evaluate onResult rules
+ let ruleMatched = false;
+ for (const onResultRule of nodeSpec.onResult) {
+ const conditionMet = await this.evaluateExpression(onResultRule.when, context);
+ if (conditionMet) {
+ ruleMatched = true;
+ // Apply patches to resources
+ if (onResultRule.patch) {
+ await this.applyResourcePatches(nodeSpec, context, onResultRule.patch);
+ }
+
+ // Emit events via outbox
+ if (onResultRule.emit) {
+ for (const emit of onResultRule.emit) {
+ // Resolve templates in emit.data
+ let emitData = emit.data as any;
+ if (typeof emitData === 'object' && emitData !== null) {
+ emitData = await this.parsePatchValues(emitData as Record, context);
+ }
+
+ // Determine event subject
+ let eventSubject = syntheticEvent.subject;
+ if (emit.type.startsWith('task.')) {
+ const taskId = emitData?.taskId || emitData?.task?.id;
+ if (taskId) {
+ eventSubject = { kind: 'task' as ResourceKind, id: String(taskId) };
+ } else if (context.task) {
+ eventSubject = { kind: 'task' as ResourceKind, id: String(context.task.id) };
+ }
+ }
+
+ const resultEvent = workflowEventBus.createEvent(
+ emit.type as WorkflowEventType,
+ eventSubject,
+ emitData,
+ {
+ causedBy: syntheticEvent.causedBy,
+ }
+ );
+ await eventOutboxService.addEvent(resultEvent);
+ }
+ }
+
+ // Determine node run status based on resource result
+ const nodeStatus: NodeRunStatus =
+ outcome.status === RESOURCE_STATUS_SUCCEEDED
+ ? NODE_RUN_STATUS_SUCCEEDED
+ : outcome.status === RESOURCE_STATUS_FAILED
+ ? NODE_RUN_STATUS_FAILED
+ : NODE_RUN_STATUS_SUCCEEDED; // Default to succeeded
+
+ // Update node run status
+ await this.updateNodeRunStatus(
+ nodeRunId,
+ nodeStatus,
+ outcome.outputs,
+ outcome.status === RESOURCE_STATUS_FAILED ? outcome.summary : undefined
+ );
+
+ // Update workflow run status based on node run completion
+ await this.updateWorkflowRunStatus(nodeRunRecord.workflowRunId);
+ break; // Only process first matching rule
+ }
+ }
+
+ // If no rule matched, still update node run status based on resource result
+ if (!ruleMatched) {
+ const nodeStatus: NodeRunStatus =
+ outcome.status === RESOURCE_STATUS_SUCCEEDED
+ ? NODE_RUN_STATUS_SUCCEEDED
+ : outcome.status === RESOURCE_STATUS_FAILED
+ ? NODE_RUN_STATUS_FAILED
+ : NODE_RUN_STATUS_SUCCEEDED;
+
+ await this.updateNodeRunStatus(
+ nodeRunId,
+ nodeStatus,
+ outcome.outputs,
+ outcome.status === RESOURCE_STATUS_FAILED ? outcome.summary : undefined
+ );
+ await this.updateWorkflowRunStatus(nodeRunRecord.workflowRunId);
+ }
+ }
+
+ /**
+ * Handle a workflow event
+ * Implements the event handling loop from optimized design
+ */
+ private async handleEvent(event: WorkflowEvent): Promise {
+ // Event de-dup by eventId
+ if (this.processedEventIds.includes(event.eventId)) {
+ console.log(`[WorkflowExecutionService] Event ${event.eventId} already processed, skipping`);
+ return;
+ }
+ this.processedEventIds.push(event.eventId);
+ if (this.processedEventIds.length > this.MAX_PROCESSED_EVENTS) {
+ // Drop oldest entries to bound memory usage
+ this.processedEventIds.splice(0, this.processedEventIds.length - this.MAX_PROCESSED_EVENTS);
+ }
+
+ console.log(
+ `[WorkflowExecutionService] Handling event ${event.type} (${event.eventId}) for subject ${event.subject.kind}:${event.subject.id}`
+ );
+
+ try {
+ // Load impacted resources and active WorkflowRuns
+ const workItemId =
+ event.subject.kind === 'workitem'
+ ? event.subject.id
+ : await this.resolveWorkItemId(event.subject);
+ if (!workItemId) {
+ console.warn(
+ `[WorkflowExecutionService] Could not resolve workItemId for event ${event.eventId}`
+ );
+ return;
+ }
+
+ // Load default workflow for the project
+ const workItem = await this.workItemsRepo.findById(workItemId);
+ if (!workItem) {
+ console.warn(`[WorkflowExecutionService] WorkItem ${workItemId} not found`);
+ return;
+ }
+
+ let defaultWorkflow = await this.workflowsRepo.findDefault(workItem.projectId);
+ if (!defaultWorkflow) {
+ // Create default workflow if it doesn't exist
+ const expectedDefaultWorkflow = createDefaultWorkflow(workItem.projectId);
+ defaultWorkflow = await this.workflowsRepo.create({
+ id: expectedDefaultWorkflow.workflow.id,
+ projectId: workItem.projectId,
+ name: expectedDefaultWorkflow.workflow.name,
+ definition: expectedDefaultWorkflow,
+ isDefault: true,
+ version: getDefaultWorkflowVersion(),
+ });
+ }
+
+ // Parse workflow and check if it needs updating based on version
+ // Handle both string and object definitions
+ let workflow: Workflow =
+ typeof defaultWorkflow.definition === 'string'
+ ? JSON.parse(defaultWorkflow.definition)
+ : (defaultWorkflow.definition as Workflow);
+ const expectedDefaultWorkflow = createDefaultWorkflow(workItem.projectId);
+ const CURRENT_VERSION = getDefaultWorkflowVersion();
+ const dbVersion = defaultWorkflow.version || getWorkflowVersion(workflow) || 1;
+
+ // Check if workflow version is outdated
+ if (dbVersion < CURRENT_VERSION) {
+ console.log(
+ `[WorkflowExecutionService] Workflow ${defaultWorkflow.id} version ${dbVersion} is outdated, updating to v${CURRENT_VERSION}...`
+ );
+
+ const oldId = defaultWorkflow.id;
+ const newId = expectedDefaultWorkflow.workflow.id;
+
+ // If ID changed (due to version change), preserve old version and create new default
+ if (oldId !== newId) {
+ // Mark old workflow as non-default (preserve for traceability)
+ await this.workflowsRepo.update(oldId, {
+ isDefault: false,
+ });
+ // Create new default workflow with new ID
+ const newWorkflowRecord = await this.workflowsRepo.create({
+ id: newId,
+ projectId: workItem.projectId,
+ name: expectedDefaultWorkflow.workflow.name,
+ definition: expectedDefaultWorkflow,
+ isDefault: true,
+ version: CURRENT_VERSION,
+ });
+ // Handle both string and object definitions
+ workflow =
+ typeof newWorkflowRecord.definition === 'string'
+ ? JSON.parse(newWorkflowRecord.definition)
+ : (newWorkflowRecord.definition as Workflow);
+ defaultWorkflow = newWorkflowRecord;
+ console.log(
+ `[WorkflowExecutionService] Created new default workflow ${newId} (v${CURRENT_VERSION}), preserved old workflow ${oldId} as non-default`
+ );
+ } else {
+ // Same ID, just update the definition (preserve old version in history if needed)
+ const updatedWorkflowRecord = await this.workflowsRepo.update(oldId, {
+ name: expectedDefaultWorkflow.workflow.name,
+ definition: expectedDefaultWorkflow,
+ version: CURRENT_VERSION,
+ isDefault: true,
+ });
+
+ if (updatedWorkflowRecord) {
+ // Handle both string and object definitions
+ workflow =
+ typeof updatedWorkflowRecord.definition === 'string'
+ ? JSON.parse(updatedWorkflowRecord.definition)
+ : (updatedWorkflowRecord.definition as Workflow);
+ defaultWorkflow = updatedWorkflowRecord;
+ console.log(
+ `[WorkflowExecutionService] Updated workflow ${oldId} to v${CURRENT_VERSION}`
+ );
+ } else {
+ // If update failed, create new workflow with new ID and preserve old one
+ await this.workflowsRepo.update(oldId, {
+ isDefault: false,
+ });
+ const newWorkflowRecord = await this.workflowsRepo.create({
+ id: newId,
+ projectId: workItem.projectId,
+ name: expectedDefaultWorkflow.workflow.name,
+ definition: expectedDefaultWorkflow,
+ isDefault: true,
+ version: CURRENT_VERSION,
+ });
+ // Handle both string and object definitions
+ workflow =
+ typeof newWorkflowRecord.definition === 'string'
+ ? JSON.parse(newWorkflowRecord.definition)
+ : (newWorkflowRecord.definition as Workflow);
+ defaultWorkflow = newWorkflowRecord;
+ console.log(
+ `[WorkflowExecutionService] Created new default workflow ${newId} (v${CURRENT_VERSION}), preserved old workflow ${oldId} as non-default`
+ );
+ }
+ }
+ }
+
+ let allNodes = this.getAllNodes(workflow);
+ console.log(
+ `[WorkflowExecutionService] Using workflow ${defaultWorkflow.id} v${getWorkflowVersion(workflow)} with ${allNodes.length} nodes`
+ );
+
+ // Get or create workflow run
+ const existingRuns = await this.workflowsRepo.findAllRuns(workItemId);
+ let runId: string;
+ let workflowRun = existingRuns.find(
+ (r: { workflowId: string; status: string }) =>
+ r.workflowId === defaultWorkflow.id &&
+ r.status !== WORKFLOW_RUN_STATUS_SUCCEEDED &&
+ r.status !== WORKFLOW_RUN_STATUS_FAILED
+ );
+
+ if (workflowRun) {
+ runId = workflowRun.id;
+ } else {
+ runId = crypto.randomUUID();
+ await this.workflowsRepo.createRun({
+ id: runId,
+ workflowId: defaultWorkflow.id,
+ workItemId,
+ });
+ }
+
+ // Find NodeSpecs whose listens[].on matches event type
+ // allNodes is already declared above
+ const context = await this.buildEvaluationContext(workflow, runId, workItemId, event);
+ const candidateNodes: NodeSpec[] = [];
+ for (const node of allNodes) {
+ const matches = await this.matchesListenRule(node, event, context);
+ if (matches) {
+ candidateNodes.push(node);
+ }
+ }
+
+ console.log(
+ `[WorkflowExecutionService] Found ${candidateNodes.length} candidate nodes for event ${event.type}`
+ );
+
+ // For each candidate node, evaluate and execute
+ for (const nodeSpec of candidateNodes) {
+ console.log(
+ `[WorkflowExecutionService] Processing candidate node ${nodeSpec.id} for event ${event.type}`
+ );
+ await this.processNode(nodeSpec, runId, context);
+ }
+
+ // Terminal completion: only mark succeeded when the workflow emits a terminal anchor.
+ // This avoids incorrectly completing event-driven workflows where many nodes are never triggered.
+ if (event.type === 'workflow.anchor.reached' && (event.data as any)?.anchor === 'merged') {
+ await this.workflowsRepo.updateRun(runId, {
+ status: WORKFLOW_RUN_STATUS_SUCCEEDED,
+ finishedAt: new Date(),
+ });
+ }
+ } catch (error) {
+ console.error(`[WorkflowExecutionService] Error handling event ${event.eventId}:`, error);
+ }
+ }
+
+ /**
+ * Process a node based on event
+ */
+ private async processNode(
+ nodeSpec: NodeSpec,
+ runId: string,
+ context: EvaluationContext
+ ): Promise {
+ try {
+ // Resolve subject resource using full context
+ const subjectId = await this.resolveIdRef(nodeSpec.subject.idRef, context);
+ if (!subjectId) {
+ console.warn(
+ `[WorkflowExecutionService] Could not resolve subject for node ${nodeSpec.id}`,
+ `idRef=${nodeSpec.subject.idRef}`,
+ `eventType=${context.event?.type}`,
+ `eventSubject=${JSON.stringify(context.event?.subject)}`,
+ `hasContextTask=${!!context.task}`,
+ `contextTaskId=${context.task?.id}`
+ );
+ return;
+ }
+
+ // Evaluate trigger.when (new format)
+ const shouldTrigger = await this.evaluateExpression(nodeSpec.trigger.when, context);
+ console.log(
+ `[WorkflowExecutionService] Node ${nodeSpec.id} trigger condition "${nodeSpec.trigger.when}" evaluated to: ${shouldTrigger}`
+ );
+ if (shouldTrigger) {
+ // Check idempotency: prevent duplicate execution
+ // Resolve idempotency key expression (if provided)
+ let idempotencyKey: string | undefined = undefined;
+ if (nodeSpec.trigger.call.idempotencyKey) {
+ idempotencyKey = await this.resolveExpression(
+ nodeSpec.trigger.call.idempotencyKey,
+ context
+ );
+ }
+
+ // Check if this node run already exists and succeeded
+ const existingNodeRun = await this.findExistingNodeRun(
+ runId,
+ nodeSpec.id,
+ subjectId,
+ nodeSpec.subject.kind,
+ idempotencyKey
+ );
+
+ if (existingNodeRun) {
+ if (existingNodeRun.status === 'succeeded') {
+ console.log(
+ `[WorkflowExecutionService] Node ${nodeSpec.id} already succeeded, skipping duplicate execution`
+ );
+ return; // Skip duplicate execution
+ }
+
+ // Handle retry logic for failed/canceled nodes
+ const maxAttempts = nodeSpec.retry?.maxAttempts || 1;
+ const backoffSeconds = nodeSpec.retry?.backoffSeconds || 0;
+
+ if (existingNodeRun.status === 'failed' || existingNodeRun.status === 'canceled') {
+ if (existingNodeRun.attempt < maxAttempts) {
+ console.log(
+ `[WorkflowExecutionService] Node ${nodeSpec.id} failed on attempt ${existingNodeRun.attempt}, will retry (attempt ${existingNodeRun.attempt + 1}/${maxAttempts})`
+ );
+
+ // Apply backoff delay if specified
+ if (backoffSeconds > 0) {
+ await new Promise((resolve) => setTimeout(resolve, backoffSeconds * 1000));
+ }
+
+ // Create retry NodeRun with incremented attempt number
+ const retryNodeRunId = crypto.randomUUID();
+ const resolvedInput = await this.parsePatchValues(
+ nodeSpec.trigger.call.input as Record,
+ context
+ );
+ const retryNodeRun: NodeRun = {
+ runId: retryNodeRunId,
+ workflowRunId: runId,
+ nodeId: nodeSpec.id,
+ resourceType: nodeSpec.trigger.call.resourceType,
+ subjectKind: nodeSpec.subject.kind,
+ subjectId,
+ subjectVersionAtStart: await this.getResourceVersion(
+ nodeSpec.subject.kind,
+ subjectId
+ ),
+ status: NODE_RUN_STATUS_RUNNING,
+ attempt: existingNodeRun.attempt + 1,
+ idempotencyKey: idempotencyKey ?? undefined,
+ input: resolvedInput,
+ output: {},
+ startedAt: new Date(),
+ finishedAt: null,
+ };
+
+ await this.persistNodeRun(retryNodeRun);
+
+ // Emit trigger events via outbox
+ if (nodeSpec.trigger.emit) {
+ for (const emit of nodeSpec.trigger.emit) {
+ const triggerEvent = workflowEventBus.createEvent(
+ emit.type as WorkflowEventType,
+ { kind: nodeSpec.subject.kind, id: subjectId },
+ emit.data,
+ {
+ causedBy: {
+ workflowRunId: runId,
+ nodeId: nodeSpec.id,
+ nodeRunId: retryNodeRunId,
+ attempt: existingNodeRun.attempt + 1,
+ },
+ }
+ );
+ await eventOutboxService.addEvent(triggerEvent);
+ }
+ }
+
+ // Create completion callback for retry NodeRun
+ const completeCallback = async (outcome: ResourceOutcome) => {
+ await this.completeNodeRun(retryNodeRunId, outcome);
+ };
+
+ // Call ResourceDispatcher with completion callback
+ await resourceDispatcher.call(
+ nodeSpec.trigger.call.resourceType,
+ resolvedInput,
+ {
+ workflowRunId: runId,
+ nodeId: nodeSpec.id,
+ nodeRunId: retryNodeRunId,
+ attempt: existingNodeRun.attempt + 1,
+ },
+ idempotencyKey,
+ completeCallback
+ );
+ return;
+ } else {
+ console.log(
+ `[WorkflowExecutionService] Node ${nodeSpec.id} failed after ${maxAttempts} attempts, giving up`
+ );
+ return;
+ }
+ }
+
+ // If existing run is still running/pending, skip to avoid duplicate resource calls
+ console.log(
+ `[WorkflowExecutionService] Node ${nodeSpec.id} has existing run with status ${existingNodeRun.status}, skipping duplicate execution`
+ );
+ return;
+ }
+
+ // Create NodeRun record
+ const nodeRunId = crypto.randomUUID();
+ // Resolve templates in trigger input BEFORE calling the resource.
+ // Best practice: workflow engine evaluates expressions; resources get concrete inputs.
+ const resolvedInput = await this.parsePatchValues(
+ nodeSpec.trigger.call.input as Record,
+ context
+ );
+ const nodeRun: NodeRun = {
+ runId: nodeRunId,
+ workflowRunId: runId,
+ nodeId: nodeSpec.id,
+ resourceType: nodeSpec.trigger.call.resourceType,
+ subjectKind: nodeSpec.subject.kind,
+ subjectId,
+ subjectVersionAtStart: await this.getResourceVersion(nodeSpec.subject.kind, subjectId),
+ status: NODE_RUN_STATUS_RUNNING,
+ attempt: 1,
+ idempotencyKey: idempotencyKey ?? undefined,
+ input: resolvedInput,
+ output: {},
+ startedAt: new Date(),
+ finishedAt: null,
+ };
+
+ // Persist node run to database (status is already running)
+ await this.persistNodeRun(nodeRun);
+
+ // Update workflow run status to running when first node starts
+ await this.updateWorkflowRunStatus(runId);
+
+ // Emit trigger events via outbox
+ if (nodeSpec.trigger.emit) {
+ for (const emit of nodeSpec.trigger.emit) {
+ const triggerEvent = workflowEventBus.createEvent(
+ emit.type as WorkflowEventType,
+ { kind: nodeSpec.subject.kind, id: subjectId },
+ emit.data,
+ {
+ causedBy: {
+ workflowRunId: runId,
+ nodeId: nodeSpec.id,
+ nodeRunId,
+ attempt: 1,
+ },
+ }
+ );
+ await eventOutboxService.addEvent(triggerEvent);
+ }
+ }
+
+ // Create completion callback for this NodeRun
+ const completeCallback = async (outcome: ResourceOutcome) => {
+ await this.completeNodeRun(nodeRunId, outcome);
+ };
+
+ // Call ResourceDispatcher with completion callback (NOT event bus)
+ await resourceDispatcher.call(
+ nodeSpec.trigger.call.resourceType,
+ resolvedInput,
+ {
+ workflowRunId: runId,
+ nodeId: nodeSpec.id,
+ nodeRunId,
+ attempt: 1,
+ },
+ idempotencyKey,
+ completeCallback
+ );
+ }
+ } catch (error) {
+ console.error(`[WorkflowExecutionService] Error processing node ${nodeSpec.id}:`, error);
+ }
+ }
+
+ /**
+ * Persist node run to database
+ */
+ private async persistNodeRun(nodeRun: NodeRun): Promise {
+ const db = await getDb();
+
+ // Insert new node run
+ await db.insert(nodeRuns).values({
+ id: nodeRun.runId,
+ runId: nodeRun.runId,
+ workflowRunId: nodeRun.workflowRunId,
+ nodeId: nodeRun.nodeId,
+ resourceType: nodeRun.resourceType,
+ subjectKind: nodeRun.subjectKind,
+ subjectId: nodeRun.subjectId,
+ subjectVersionAtStart: nodeRun.subjectVersionAtStart,
+ status: nodeRun.status,
+ attempt: nodeRun.attempt,
+ idempotencyKey: nodeRun.idempotencyKey || null,
+ input: JSON.stringify(nodeRun.input),
+ output: JSON.stringify(nodeRun.output),
+ startedAt: nodeRun.startedAt ? new Date(nodeRun.startedAt) : null,
+ createdAt: new Date(),
+ });
+ }
+
+ /**
+ * Update workflow run status based on node runs
+ * Sets status to running when first node starts, succeeded when all nodes succeed, failed when any node fails
+ */
+ private async updateWorkflowRunStatus(workflowRunId: string): Promise {
+ const nodeRuns = await this.getNodeRunsForWorkflowRun(workflowRunId);
+
+ if (nodeRuns.length === 0) {
+ return; // No node runs yet
+ }
+
+ // NOTE: In an event-driven workflow, not all nodes will be triggered (and therefore not all will have nodeRuns).
+ // So we must NOT mark the whole workflow run succeeded based on nodeRuns reaching terminal states.
+ // Success is handled by an explicit terminal event/anchor (see handleEvent).
+
+ // Check if any node failed
+ const hasFailed = nodeRuns.some((nr) => nr.status === 'failed');
+
+ // Check if any node is running
+ const hasRunning = nodeRuns.some((nr) => nr.status === 'running');
+
+ type WorkflowRunStatus = 'pending' | 'running' | 'succeeded' | 'failed' | 'blocked' | 'skipped';
+ let newStatus: WorkflowRunStatus;
+ if (hasFailed) {
+ newStatus = WORKFLOW_RUN_STATUS_FAILED;
+ } else if (hasRunning) {
+ newStatus = WORKFLOW_RUN_STATUS_RUNNING;
+ } else {
+ newStatus = WORKFLOW_RUN_STATUS_PENDING;
+ }
+
+ // Get current workflow run to check if status changed (avoid full table scan)
+ const workflowRun = await this.workflowsRepo.findRunById(workflowRunId);
+
+ if (!workflowRun) {
+ return;
+ }
+
+ // Only update if status changed
+ if (workflowRun.status !== newStatus) {
+ await this.workflowsRepo.updateRun(workflowRunId, {
+ status: newStatus,
+ startedAt: workflowRun.startedAt ? new Date(workflowRun.startedAt) : new Date(),
+ finishedAt: newStatus === WORKFLOW_RUN_STATUS_FAILED ? new Date() : null,
+ });
+ }
+ }
+
+ /**
+ * Update node run status
+ */
+ private async updateNodeRunStatus(
+ nodeRunId: string,
+ status: NodeRunStatus,
+ output?: Record,
+ error?: string
+ ): Promise {
+ const db = await getDb();
+
+ // Get existing node run to preserve output if not provided
+ const [existing] = await db.select().from(nodeRuns).where(eq(nodeRuns.id, nodeRunId)).execute();
+
+ const updateData: {
+ status: NodeRunStatus;
+ finishedAt: Date | null;
+ output?: string;
+ error?: string | null;
+ } = {
+ status,
+ finishedAt:
+ status === NODE_RUN_STATUS_SUCCEEDED || status === NODE_RUN_STATUS_FAILED
+ ? new Date()
+ : null,
+ };
+
+ // Only update output if provided (merge with existing if needed)
+ if (output !== undefined) {
+ const existingOutput =
+ existing && existing.output
+ ? typeof existing.output === 'string'
+ ? JSON.parse(existing.output)
+ : existing.output
+ : {};
+ updateData.output = JSON.stringify({ ...existingOutput, ...output });
+ }
+
+ // Update error if provided
+ if (error !== undefined) {
+ updateData.error = error || null;
+ }
+
+ await db.update(nodeRuns).set(updateData).where(eq(nodeRuns.id, nodeRunId));
+ }
+
+ /**
+ * Apply resource patches
+ * Resolves resource ID by resourceKind (not nodeSpec.subject) and parses expressions in patch values
+ */
+ private async applyResourcePatches(
+ _nodeSpec: NodeSpec,
+ context: EvaluationContext,
+ patches: Record>
+ ): Promise {
+ for (const [resourceKind, patch] of Object.entries(patches)) {
+ if (typeof patch === 'object' && patch !== null) {
+ // Resolve resource ID by resourceKind (not nodeSpec.subject)
+ let resourceId: string | null = null;
+ if (resourceKind === 'workitem') {
+ resourceId =
+ (context.workitem?.id as string | null) ||
+ (context.workItem?.id as string | null) ||
+ null;
+ } else if (resourceKind === 'task') {
+ resourceId = (context.task?.id as string | null) || null;
+ } else if (resourceKind === 'pr_request') {
+ resourceId = (context.pr_request?.id as string | null) || null;
+ } else if (resourceKind === 'worktree') {
+ // Worktree ID is typically the workitem's worktreePath
+ resourceId =
+ (context.workitem?.worktreePath as string | null) ||
+ (context.workItem?.worktreePath as string | null) ||
+ null;
+ }
+
+ if (!resourceId) {
+ console.warn(
+ `[WorkflowExecutionService] Could not resolve resource ID for patch ${resourceKind}`
+ );
+ continue;
+ }
+
+ // Parse expressions in patch values recursively
+ const parsedPatch = await this.parsePatchValues(patch, context);
+
+ // Apply patch based on resource kind
+ if (resourceKind === 'workitem') {
+ const updated = await this.workItemsRepo.update(resourceId, parsedPatch as any);
+ if (updated) {
+ // Emit workitem.updated event to trigger nodes listening to it
+ const updateEvent = workflowEventBus.createEvent(
+ 'workitem.updated',
+ { kind: 'workitem', id: resourceId },
+ {
+ ...parsedPatch,
+ },
+ {
+ causedBy: context.event?.causedBy,
+ }
+ );
+ await eventOutboxService.addEvent(updateEvent);
+ }
+ } else if (resourceKind === 'task') {
+ // Tasks are now separate from AgentRuns - update task status
+ await tasksRepository.update(resourceId, {
+ ...(parsedPatch.status != null ? { status: parsedPatch.status as any } : {}),
+ ...(parsedPatch.currentAgentRunId !== undefined && {
+ currentAgentRunId: parsedPatch.currentAgentRunId as string | null,
+ }),
+ ...(parsedPatch.output !== undefined && {
+ output: parsedPatch.output as Record,
+ }),
+ });
+ console.log(
+ `[WorkflowExecutionService] Applied patch to task ${resourceId}:`,
+ parsedPatch
+ );
+ } else if (resourceKind === 'pr_request') {
+ // PR requests are PullRequests
+ await pullRequestsRepository.update(resourceId, parsedPatch as any);
+ } else {
+ console.log(
+ `[WorkflowExecutionService] Applying patch to ${resourceKind} ${resourceId}:`,
+ parsedPatch
+ );
+ }
+ }
+ }
+ }
+
+ /**
+ * Parse expressions in patch values recursively
+ * Supports both {path} and ctx.path syntax, and 'ctx.path' string literals
+ */
+ private async parsePatchValues(
+ patch: Record,
+ context: EvaluationContext
+ ): Promise