diff --git a/AGENTS.md b/AGENTS.md index d7b5106..6f1d000 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -4,6 +4,13 @@ Every agent working on this Commander project MUST follow these standards. No exceptions. +We use bun run tauri dev to run the app. + +You always work on features that are configurable via the Settings Panel in the app. Every feature must be toggleable or adjustable through user preferences. +Before you write any code, you will write the PRD and save in the docs/ directory. + +You write the TDD and then write the feature implementation. + ## Architecture Pattern - STRICT COMPLIANCE ### Modular Structure (REQUIRED) diff --git a/package.json b/package.json index 3ed7e56..bb9c770 100644 --- a/package.json +++ b/package.json @@ -13,6 +13,8 @@ "test:watch": "vitest --watch" }, "dependencies": { + "@openai/codex": "^0.44.0", + "@openai/codex-sdk": "^0.44.0", "@phosphor-icons/react": "^2.1.10", "@radix-ui/react-accordion": "^1.2.12", "@radix-ui/react-alert-dialog": "^1.1.15", @@ -30,9 +32,11 @@ "@radix-ui/react-switch": "^1.2.6", "@radix-ui/react-toggle": "^1.1.10", "@radix-ui/react-tooltip": "^1.2.8", + "@radix-ui/react-use-controllable-state": "^1.2.2", "@tanstack/react-table": "^8.21.3", "@tauri-apps/api": "^2", "@tauri-apps/plugin-opener": "^2", + "@types/react-syntax-highlighter": "^15.5.13", "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", "lucide-react": "^0.541.0", @@ -40,21 +44,24 @@ "react": "^19.1.0", "react-dom": "^19.1.0", "react-file-icon": "^1.5.0", + "react-markdown": "^10.1.0", + "react-syntax-highlighter": "^15.6.6", + "remark-gfm": "^4.0.1", "vscode-icons-js": "^11.6.1" }, "devDependencies": { "@tauri-apps/cli": "^2", + "@testing-library/jest-dom": "^6.6.3", + "@testing-library/react": "^16.0.1", "@types/react": "^19.1.8", "@types/react-dom": "^19.1.6", "@vitejs/plugin-react": "^4.6.0", "autoprefixer": "^10.4.21", - "@testing-library/react": "^16.0.1", - "@testing-library/jest-dom": "^6.6.3", "jsdom": "^26.0.0", - "vitest": "^2.0.5", 
"postcss": "^8.5.6", "tailwindcss": "^3.4.0", "typescript": "~5.8.3", - "vite": "^7.0.4" + "vite": "^7.0.4", + "vitest": "^2.0.5" } } diff --git a/scripts/codex-sdk-runner.mjs b/scripts/codex-sdk-runner.mjs new file mode 100755 index 0000000..aab3079 --- /dev/null +++ b/scripts/codex-sdk-runner.mjs @@ -0,0 +1,109 @@ +#!/usr/bin/env node +import fs from 'fs' +import { pathToFileURL, fileURLToPath } from 'url' + +export async function runCodex(options = {}, io = defaultIO) { + const { + sessionId, + prompt = '', + workingDirectory, + sandboxMode, + model, + skipGitRepoCheck, + } = options + + let CodexModule + const distPath = process.env.CODEX_SDK_DIST_PATH + if (distPath && fs.existsSync(distPath)) { + CodexModule = await import(pathToFileURL(distPath).href) + } else { + CodexModule = await import('@openai/codex-sdk') + } + const { Codex } = CodexModule + + const codexOptions = workingDirectory ? { workingDirectory } : {} + const codex = new Codex(codexOptions) + + const threadOptions = { + ...(model ? { model } : {}), + ...(sandboxMode ? { sandboxMode } : {}), + ...(workingDirectory ? { workingDirectory } : {}), + skipGitRepoCheck: skipGitRepoCheck !== false, + } + + const thread = codex.startThread(threadOptions) + + try { + const { events } = await thread.runStreamed(prompt) + for await (const event of events) { + await io.write( + JSON.stringify({ + sessionId, + content: JSON.stringify(event), + finished: false, + }) + ) + } + + await io.write( + JSON.stringify({ + sessionId, + content: '', + finished: true, + }) + ) + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error) + const payload = JSON.stringify({ sessionId, error: message, finished: true }) + if (io.writeError) { + await io.writeError(payload) + } else { + await io.write(payload) + } + } +} + +async function readStdin() { + const chunks = [] + for await (const chunk of process.stdin) { + chunks.push(chunk) + } + if (!chunks.length) return '' + return Buffer.concat(chunks.map((c) => (typeof c === 'string' ? Buffer.from(c) : c))).toString('utf8') +} + +async function main() { + const stdin = await readStdin() + const argInput = process.argv[2] + const rawInput = stdin && stdin.trim().length > 0 ? stdin : argInput + + if (!rawInput) { + throw new Error('Missing input payload for Codex SDK runner') + } + + const payload = JSON.parse(rawInput) + await runCodex(payload) +} + +const defaultIO = { + write: async (msg) => { + process.stdout.write(msg + '\n') + }, + writeError: async (msg) => { + process.stderr.write(msg + '\n') + }, +} + +// Check if this script is being run directly +// Use fs.realpathSync to resolve symlinks (like /var -> /private/var on macOS) +const scriptPath = process.argv[1] ? fs.realpathSync(process.argv[1]) : null +const currentPath = import.meta.url ? fs.realpathSync(fileURLToPath(import.meta.url)) : null +const isMainModule = scriptPath && currentPath && scriptPath === currentPath + +if (isMainModule) { + main().catch((err) => { + const message = err instanceof Error ? 
err.message : String(err) + process.stderr.write(JSON.stringify({ error: message }) + '\n') + process.exit(1) + }) +} diff --git a/src-tauri/Cargo.lock b/src-tauri/Cargo.lock index 24cc6bc..f507982 100644 --- a/src-tauri/Cargo.lock +++ b/src-tauri/Cargo.lock @@ -579,6 +579,7 @@ name = "commander" version = "0.1.0" dependencies = [ "assert_cmd", + "async-trait", "chrono", "dirs 5.0.1", "filetime", @@ -588,6 +589,7 @@ dependencies = [ "portable-pty", "regex", "reqwest", + "semver", "serde", "serde_json", "serial_test", diff --git a/src-tauri/Cargo.toml b/src-tauri/Cargo.toml index 21d3f8d..221cf80 100644 --- a/src-tauri/Cargo.toml +++ b/src-tauri/Cargo.toml @@ -38,6 +38,8 @@ portable-pty = "0.8" which = "6" uuid = { version = "1.0", features = ["v4", "serde"] } regex = "1.10" +async-trait = "0.1" +semver = "1" [dev-dependencies] tokio-test = "0.4" diff --git a/src-tauri/src/commands/chat_history_commands.rs b/src-tauri/src/commands/chat_history_commands.rs index 1d33f2a..6968ec9 100644 --- a/src-tauri/src/commands/chat_history_commands.rs +++ b/src-tauri/src/commands/chat_history_commands.rs @@ -1,11 +1,10 @@ use crate::models::chat_history::*; use crate::services::chat_history_service::{ - ensure_commander_directory, group_messages_into_sessions, - save_chat_session as save_session_impl, - load_chat_sessions as load_sessions_impl, - load_session_messages, delete_chat_session as delete_session_impl, - get_chat_history_stats as get_stats_impl, export_chat_history as export_impl, - migrate_legacy_chat_data as migrate_impl, extract_file_mentions, + delete_chat_session as delete_session_impl, ensure_commander_directory, + export_chat_history as export_impl, extract_file_mentions, + get_chat_history_stats as get_stats_impl, group_messages_into_sessions, + load_chat_sessions as load_sessions_impl, load_session_messages, + migrate_legacy_chat_data as migrate_impl, save_chat_session as save_session_impl, }; /// Save a chat session with its messages @@ -20,7 +19,7 @@ pub 
async fn save_chat_session( // Group messages into sessions let sessions = group_messages_into_sessions(messages.clone()).await?; - + if sessions.is_empty() { return Err("Failed to create sessions from messages".to_string()); } @@ -31,9 +30,9 @@ pub async fn save_chat_session( let session_messages: Vec = messages .iter() .filter(|msg| { - msg.agent == session.agent && - msg.timestamp >= session.start_time && - msg.timestamp <= session.end_time + msg.agent == session.agent + && msg.timestamp >= session.start_time + && msg.timestamp <= session.end_time }) .cloned() .collect(); @@ -66,18 +65,13 @@ pub async fn get_session_messages( /// Delete a chat session #[tauri::command] -pub async fn delete_chat_session( - project_path: String, - session_id: String, -) -> Result<(), String> { +pub async fn delete_chat_session(project_path: String, session_id: String) -> Result<(), String> { delete_session_impl(&project_path, &session_id).await } /// Get chat history statistics #[tauri::command] -pub async fn get_chat_history_stats( - project_path: String, -) -> Result { +pub async fn get_chat_history_stats(project_path: String) -> Result { get_stats_impl(&project_path).await } @@ -95,7 +89,7 @@ pub async fn export_chat_history( include_metadata, date_range: None, }; - + export_impl(&project_path, request).await } @@ -121,7 +115,7 @@ pub async fn append_chat_message( // Create enhanced message let session_id = uuid::Uuid::new_v4().to_string(); let mut message = EnhancedChatMessage::new(&role, &content, &agent, &session_id); - + // Set metadata message.metadata.branch = branch; message.metadata.working_dir = working_dir; @@ -129,18 +123,19 @@ pub async fn append_chat_message( // Try to find an existing session to append to let recent_sessions = load_sessions_impl(&project_path, Some(1), Some(agent.clone())).await?; - + let session_to_use = if let Some(recent_session) = recent_sessions.first() { // Check if we should append to this session based on timing let time_gap_minutes = 
(message.timestamp - recent_session.end_time) / 60; if time_gap_minutes <= 5 && recent_session.agent == agent { // Update existing session message.metadata.session_id = recent_session.id.clone(); - + // Load existing messages and append new one - let mut existing_messages = load_session_messages(&project_path, &recent_session.id).await?; + let mut existing_messages = + load_session_messages(&project_path, &recent_session.id).await?; existing_messages.push(message.clone()); - + // Create updated session let updated_sessions = group_messages_into_sessions(existing_messages.clone()).await?; if let Some(updated_session) = updated_sessions.first() { @@ -183,9 +178,9 @@ pub async fn search_chat_history( ) -> Result, String> { let all_sessions = load_sessions_impl(&project_path, None, agent).await?; let query_lower = query.to_lowercase(); - + let mut matching_sessions = Vec::new(); - + for session in all_sessions { // Check if session summary matches if session.summary.to_lowercase().contains(&query_lower) { @@ -195,10 +190,10 @@ pub async fn search_chat_history( // Check if any message in the session matches if let Ok(messages) = load_session_messages(&project_path, &session.id).await { - let has_matching_message = messages.iter().any(|msg| { - msg.content.to_lowercase().contains(&query_lower) - }); - + let has_matching_message = messages + .iter() + .any(|msg| msg.content.to_lowercase().contains(&query_lower)); + if has_matching_message { matching_sessions.push(session); } @@ -221,24 +216,22 @@ pub async fn cleanup_old_sessions( ) -> Result { let cutoff_timestamp = chrono::Utc::now().timestamp() - (retention_days as i64 * 24 * 60 * 60); let all_sessions = load_sessions_impl(&project_path, None, None).await?; - + let mut deleted_count = 0; - + for session in all_sessions { if session.end_time < cutoff_timestamp { delete_session_impl(&project_path, &session.id).await?; deleted_count += 1; } } - + Ok(deleted_count) } /// Validate project has valid chat history structure 
#[tauri::command] -pub async fn validate_chat_history_structure( - project_path: String, -) -> Result { +pub async fn validate_chat_history_structure(project_path: String) -> Result { // Try to ensure directory exists match ensure_commander_directory(&project_path).await { Ok(_) => Ok(true), @@ -256,11 +249,12 @@ mod tests { // Initialize as git repo let git_dir = temp_dir.path().join(".git"); std::fs::create_dir_all(&git_dir).expect("Failed to create .git directory"); - + // Create a basic git config to mark as valid repo let config_file = git_dir.join("config"); - std::fs::write(config_file, "[core]\nrepositoryformatversion = 0\n").expect("Failed to write git config"); - + std::fs::write(config_file, "[core]\nrepositoryformatversion = 0\n") + .expect("Failed to write git config"); + temp_dir } @@ -286,7 +280,9 @@ mod tests { assert!(!session_id.is_empty(), "Should return session ID"); // Load sessions - let sessions = load_chat_sessions(project_path.clone(), None, None).await.unwrap(); + let sessions = load_chat_sessions(project_path.clone(), None, None) + .await + .unwrap(); assert_eq!(sessions.len(), 1, "Should have one session"); assert_eq!(sessions[0].agent, "claude"); } @@ -298,21 +294,27 @@ mod tests { // Create messages with specific timestamps to ensure same session let base_timestamp = chrono::Utc::now().timestamp(); - - let mut message1 = EnhancedChatMessage::new("user", "First message", "claude", "session-test"); + + let mut message1 = + EnhancedChatMessage::new("user", "First message", "claude", "session-test"); message1.timestamp = base_timestamp; - - let mut message2 = EnhancedChatMessage::new("assistant", "First response", "claude", "session-test"); + + let mut message2 = + EnhancedChatMessage::new("assistant", "First response", "claude", "session-test"); message2.timestamp = base_timestamp + 60; // 1 minute later // Save as session let session_id = save_chat_session( project_path.clone(), - vec![message1.clone(), message2.clone()] - 
).await.unwrap(); + vec![message1.clone(), message2.clone()], + ) + .await + .unwrap(); // Load messages - let messages = get_session_messages(project_path, session_id).await.unwrap(); + let messages = get_session_messages(project_path, session_id) + .await + .unwrap(); assert_eq!(messages.len(), 2, "Should have two messages in session"); assert_eq!(messages[0].content, "First message"); assert_eq!(messages[1].content, "First response"); @@ -329,19 +331,17 @@ mod tests { create_test_message("assistant", "I'll help with Rust!", "claude"), ]; - save_chat_session(project_path.clone(), messages).await.unwrap(); + save_chat_session(project_path.clone(), messages) + .await + .unwrap(); // Search for "Rust" - let results = search_chat_history( - project_path.clone(), - "Rust".to_string(), - None, - None, - ).await.unwrap(); + let results = search_chat_history(project_path.clone(), "Rust".to_string(), None, None) + .await + .unwrap(); assert_eq!(results.len(), 1, "Should find one matching session"); - assert!(results[0].summary.contains("Rust") || - results[0].summary.contains("programming")); + assert!(results[0].summary.contains("Rust") || results[0].summary.contains("programming")); } #[tokio::test] @@ -349,18 +349,22 @@ mod tests { let temp_dir = create_test_project_dir(); let project_path = temp_dir.path().to_string_lossy().to_string(); - let messages = vec![ - create_test_message("user", "To be deleted", "claude"), - ]; + let messages = vec![create_test_message("user", "To be deleted", "claude")]; - let session_id = save_chat_session(project_path.clone(), messages).await.unwrap(); + let session_id = save_chat_session(project_path.clone(), messages) + .await + .unwrap(); // Verify it exists - let sessions_before = load_chat_sessions(project_path.clone(), None, None).await.unwrap(); + let sessions_before = load_chat_sessions(project_path.clone(), None, None) + .await + .unwrap(); assert_eq!(sessions_before.len(), 1); // Delete it - 
delete_chat_session(project_path.clone(), session_id).await.unwrap(); + delete_chat_session(project_path.clone(), session_id) + .await + .unwrap(); // Verify it's gone let sessions_after = load_chat_sessions(project_path, None, None).await.unwrap(); @@ -372,21 +376,21 @@ mod tests { let temp_dir = create_test_project_dir(); let project_path = temp_dir.path().to_string_lossy().to_string(); - let messages = vec![ - create_test_message("user", "Export test", "claude"), - ]; + let messages = vec![create_test_message("user", "Export test", "claude")]; - save_chat_session(project_path.clone(), messages).await.unwrap(); + save_chat_session(project_path.clone(), messages) + .await + .unwrap(); // Export as JSON - let exported = export_chat_history( - project_path, - ExportFormat::Json, - None, - true, - ).await.unwrap(); - - assert!(exported.contains("Export test"), "Should contain message content"); + let exported = export_chat_history(project_path, ExportFormat::Json, None, true) + .await + .unwrap(); + + assert!( + exported.contains("Export test"), + "Should contain message content" + ); assert!(exported.contains("claude"), "Should contain agent name"); } -} \ No newline at end of file +} diff --git a/src-tauri/src/commands/chat_migration_commands.rs b/src-tauri/src/commands/chat_migration_commands.rs index bb2fecd..43a1032 100644 --- a/src-tauri/src/commands/chat_migration_commands.rs +++ b/src-tauri/src/commands/chat_migration_commands.rs @@ -1,6 +1,6 @@ +use crate::commands::git_commands::{load_project_chat, save_project_chat, ChatMessage}; use crate::models::chat_history::*; use crate::services::chat_history_service::*; -use crate::commands::git_commands::{load_project_chat, save_project_chat, ChatMessage}; /// Migrate existing project chat data to new enhanced format #[tauri::command] @@ -10,7 +10,7 @@ pub async fn migrate_project_chat_to_enhanced( ) -> Result { // Load existing chat data let existing_messages = load_project_chat(app.clone(), 
project_path.clone()).await?; - + if existing_messages.is_empty() { return Ok("No existing chat data to migrate".to_string()); } @@ -32,7 +32,7 @@ pub async fn migrate_project_chat_to_enhanced( // Get migration statistics let sessions = load_chat_sessions(&project_path, None, None).await?; let total_messages: usize = sessions.iter().map(|s| s.message_count).sum(); - + Ok(format!( "Migration completed: {} sessions created with {} total messages", sessions.len(), @@ -45,7 +45,7 @@ pub async fn migrate_project_chat_to_enhanced( pub async fn check_migration_needed(project_path: String) -> Result { // Check if enhanced chat history exists let enhanced_sessions = load_chat_sessions(&project_path, Some(1), None).await?; - + // If no enhanced sessions exist, migration might be needed Ok(enhanced_sessions.is_empty()) } @@ -57,7 +57,7 @@ pub async fn backup_existing_chat_data( project_path: String, ) -> Result { let existing_messages = load_project_chat(app, project_path.clone()).await?; - + if existing_messages.is_empty() { return Ok("No chat data to backup".to_string()); } @@ -65,20 +65,24 @@ pub async fn backup_existing_chat_data( // Create backup in .commander directory let chat_dir = ensure_commander_directory(&project_path).await?; let backup_file = chat_dir.join("chat_backup.json"); - + let backup_data = serde_json::json!({ "backup_date": chrono::Utc::now().to_rfc3339(), "original_messages": existing_messages, "version": "legacy" }); - + let backup_json = serde_json::to_string_pretty(&backup_data) .map_err(|e| format!("Failed to serialize backup data: {}", e))?; - - tokio::fs::write(backup_file, backup_json).await + + tokio::fs::write(backup_file, backup_json) + .await .map_err(|e| format!("Failed to write backup file: {}", e))?; - Ok(format!("Backup created with {} messages", existing_messages.len())) + Ok(format!( + "Backup created with {} messages", + existing_messages.len() + )) } /// Automatically migrate chat data when needed @@ -89,24 +93,24 @@ pub async fn 
auto_migrate_chat_data( ) -> Result, String> { // Check if migration is needed let needs_migration = check_migration_needed(project_path.clone()).await?; - + if !needs_migration { return Ok(None); } // Check if there's existing data to migrate let existing_messages = load_project_chat(app.clone(), project_path.clone()).await?; - + if existing_messages.is_empty() { return Ok(None); } // Create backup first let backup_result = backup_existing_chat_data(app.clone(), project_path.clone()).await?; - + // Perform migration let migration_result = migrate_project_chat_to_enhanced(app, project_path).await?; - + Ok(Some(format!("{}\n{}", backup_result, migration_result))) } @@ -129,7 +133,8 @@ pub async fn save_enhanced_chat_message( agent.clone(), branch, working_dir, - ).await?; + ) + .await?; // Also save to legacy format for backward compatibility let legacy_message = ChatMessage { @@ -156,14 +161,14 @@ pub async fn get_unified_chat_history( ) -> Result, String> { // Try enhanced format first let enhanced_sessions = load_chat_sessions(&project_path, limit, None).await?; - + if !enhanced_sessions.is_empty() { return Ok(enhanced_sessions); } // Fall back to legacy format with auto-migration let legacy_messages = load_project_chat(app, project_path.clone()).await?; - + if legacy_messages.is_empty() { return Ok(Vec::new()); } @@ -187,7 +192,7 @@ pub async fn get_unified_chat_history( .collect(); let sessions = group_messages_into_sessions(enhanced_messages).await?; - + // Apply limit if specified if let Some(limit) = limit { Ok(sessions.into_iter().take(limit).collect()) @@ -209,11 +214,12 @@ mod tests { // Initialize as git repo let git_dir = temp_dir.path().join(".git"); std::fs::create_dir_all(&git_dir).expect("Failed to create .git directory"); - + // Create a basic git config to mark as valid repo let config_file = git_dir.join("config"); - std::fs::write(config_file, "[core]\nrepositoryformatversion = 0\n").expect("Failed to write git config"); - + 
std::fs::write(config_file, "[core]\nrepositoryformatversion = 0\n") + .expect("Failed to write git config"); + temp_dir } @@ -241,7 +247,10 @@ mod tests { // For now, just test the path creation logic let chat_dir = ensure_commander_directory(&project_path).await.unwrap(); let backup_file = chat_dir.join("chat_backup.json"); - - assert!(!backup_file.exists(), "Backup file should not exist initially"); + + assert!( + !backup_file.exists(), + "Backup file should not exist initially" + ); } -} \ No newline at end of file +} diff --git a/src-tauri/src/commands/cli_commands.rs b/src-tauri/src/commands/cli_commands.rs index 5775f85..d9065dc 100644 --- a/src-tauri/src/commands/cli_commands.rs +++ b/src-tauri/src/commands/cli_commands.rs @@ -1,25 +1,49 @@ +use once_cell::sync::Lazy; +use portable_pty::{native_pty_system, CommandBuilder, PtySize}; use std::collections::HashMap; -use tauri::Emitter; -use tokio::io::{AsyncBufReadExt, BufReader}; -use tokio::process::{Command, Child}; +use std::env; +use std::fs; +use std::path::{Path, PathBuf}; use std::process::Stdio; use std::sync::Arc; +use tauri::Emitter; +use tokio::io::{AsyncBufReadExt, AsyncReadExt, AsyncWriteExt, BufReader}; +use tokio::process::{Child, Command}; use tokio::sync::{mpsc, Mutex}; -use once_cell::sync::Lazy; -use portable_pty::{native_pty_system, CommandBuilder, PtySize}; -use crate::models::*; use crate::commands::settings_commands::load_all_agent_settings; -use crate::services::execution_mode_service::{ExecutionMode, codex_flags_for_mode}; +use crate::models::*; +use crate::services::cli_command_builder::build_codex_command_args; +use crate::services::cli_output_service::{sanitize_cli_output_line, CodexStreamAccumulator}; +use crate::services::codex_sdk_service::{build_codex_thread_prefs, CodexThreadPreferences}; +use crate::services::execution_mode_service::ExecutionMode; +use serde::{Deserialize, Serialize}; +use std::process::Command as StdCommand; + +const CODEX_SDK_RUNNER_SOURCE: &str = 
include_str!("../../../scripts/codex-sdk-runner.mjs"); + +static CODEX_SDK_RUNNER_PATH: Lazy> = Lazy::new(|| { + let mut path = std::env::temp_dir(); + path.push("commander-codex-sdk-runner.mjs"); + + if let Err(e) = fs::write(&path, CODEX_SDK_RUNNER_SOURCE) { + return Err(format!( + "Failed to materialize Codex SDK runner script: {}", + e + )); + } + + Ok(path) +}); // Constants for session management const SESSION_TIMEOUT_SECONDS: i64 = 1800; // 30 minutes -static SESSIONS: Lazy>>> = +static SESSIONS: Lazy>>> = Lazy::new(|| Arc::new(Mutex::new(HashMap::new()))); // Secondary index for O(1) session lookup by agent+working_dir -static SESSION_INDEX: Lazy>>> = +static SESSION_INDEX: Lazy>>> = Lazy::new(|| Arc::new(Mutex::new(HashMap::new()))); // Internal ActiveSession struct for session management (not serializable due to Child process) @@ -64,24 +88,31 @@ fn generate_session_key(agent: &str, working_dir: &Option) -> String { fn get_agent_quit_command(agent: &str) -> &str { match agent { "claude" => "/quit", - "codex" => "/exit", + "codex" => "/exit", "gemini" => "/quit", _ => "/quit", } } -async fn build_agent_command_args(agent: &str, message: &str, app_handle: &tauri::AppHandle, execution_mode: Option, dangerous_bypass: bool, permission_mode: Option) -> Vec { +async fn build_agent_command_args( + agent: &str, + message: &str, + app_handle: &tauri::AppHandle, + execution_mode: Option, + dangerous_bypass: bool, + permission_mode: Option, +) -> Vec { let mut args = Vec::new(); - + // Try to get agent settings to include model preference - let agent_settings = load_all_agent_settings(app_handle.clone()).await.unwrap_or_else(|_| { - AllAgentSettings { + let agent_settings = load_all_agent_settings(app_handle.clone()) + .await + .unwrap_or_else(|_| AllAgentSettings { claude: AgentSettings::default(), codex: AgentSettings::default(), gemini: AgentSettings::default(), max_concurrent_sessions: 10, - } - }); + }); let current_agent_settings = match agent { "claude" => 
&agent_settings.claude, @@ -89,7 +120,9 @@ async fn build_agent_command_args(agent: &str, message: &str, app_handle: &tauri "gemini" => &agent_settings.gemini, _ => &AgentSettings::default(), }; - + + let parsed_execution_mode = execution_mode.as_deref().and_then(ExecutionMode::from_str); + match agent { "claude" => { // Use prompt mode with stream-json for structured output @@ -118,27 +151,12 @@ async fn build_agent_command_args(agent: &str, message: &str, app_handle: &tauri } } "codex" => { - args.push("exec".to_string()); - - // Add model flag if set in preferences - if let Some(ref model) = current_agent_settings.model { - if !model.is_empty() { - args.push("--model".to_string()); - args.push(model.clone()); - } - } - - // Add flags based on execution mode (if provided) - if let Some(mode_str) = execution_mode { - if let Some(mode) = ExecutionMode::from_str(&mode_str) { - let extra = codex_flags_for_mode(mode, dangerous_bypass && matches!(mode, ExecutionMode::Full)); - args.extend(extra); - } - } - - if !message.is_empty() { - args.push(message.to_string()); - } + args.extend(build_codex_command_args( + message, + parsed_execution_mode, + dangerous_bypass, + Some(current_agent_settings), + )); } "gemini" => { args.push("--prompt".to_string()); @@ -149,7 +167,7 @@ async fn build_agent_command_args(agent: &str, message: &str, app_handle: &tauri args.push(pm.clone()); } } - + // Add model flag if set in preferences if let Some(ref model) = current_agent_settings.model { if !model.is_empty() { @@ -157,7 +175,7 @@ async fn build_agent_command_args(agent: &str, message: &str, app_handle: &tauri args.push(model.clone()); } } - + if !message.is_empty() { args.push(message.to_string()); } @@ -169,7 +187,7 @@ async fn build_agent_command_args(agent: &str, message: &str, app_handle: &tauri } } } - + args } @@ -179,13 +197,13 @@ fn parse_command_structure(agent: &str, message: &str) -> (String, String) { // 2. "/claude help" -> agent: "claude", message: "help" // 3. 
"/help" when agent is already "claude" -> agent: "claude", message: "/help" // 4. "help" when agent is "claude" -> agent: "claude", message: "help" - + if message.starts_with('/') { let parts: Vec<&str> = message.trim_start_matches('/').split_whitespace().collect(); if parts.is_empty() { return (agent.to_string(), "help".to_string()); } - + // Check if first part is an agent name (with aliases) let agent_or_aliases = ["claude", "codex", "gemini", "test", "code", "copilot"]; if agent_or_aliases.contains(&parts[0]) { @@ -195,7 +213,7 @@ fn parse_command_structure(agent: &str, message: &str) -> (String, String) { other => other.to_string(), }; let remaining_parts = &parts[1..]; - + if remaining_parts.is_empty() { // Just "/claude" -> start interactive session (actual_agent, String::new()) @@ -251,41 +269,42 @@ async fn terminate_session_process(session_id: &str) -> Result<(), String> { let mut sessions = SESSIONS.lock().await; sessions.remove(session_id) }; - + if let Some(session) = session_info { // Remove from index as well { - let session_key = generate_session_key(&session.session.agent, &session.session.working_dir); + let session_key = + generate_session_key(&session.session.agent, &session.session.working_dir); let mut session_index = SESSION_INDEX.lock().await; session_index.remove(&session_key); } - + // Send quit command to the process first if let Some(sender) = &session.stdin_sender { let quit_cmd = get_agent_quit_command(&session.session.agent); let _ = sender.send(format!("{}\n", quit_cmd)); - + // Give the process a moment to gracefully exit tokio::time::sleep(tokio::time::Duration::from_millis(100)).await; } - + // Then forcefully kill if still running let mut process_guard = session.process.lock().await; if let Some(mut process) = process_guard.take() { let _ = process.kill().await; } } - + Ok(()) } async fn cleanup_inactive_sessions() -> Result<(), String> { let mut sessions_to_remove = Vec::new(); let current_time = chrono::Utc::now().timestamp(); 
- + { let sessions = SESSIONS.lock().await; - + for (id, session) in sessions.iter() { // Remove sessions inactive for configured timeout if current_time - session.session.last_activity > SESSION_TIMEOUT_SECONDS { @@ -293,11 +312,11 @@ async fn cleanup_inactive_sessions() -> Result<(), String> { } } } - + for session_id in sessions_to_remove { let _ = terminate_session_process(&session_id).await; } - + Ok(()) } @@ -312,6 +331,7 @@ async fn check_command_available(command: &str) -> bool { async fn try_spawn_with_pty( app: tauri::AppHandle, session_id: String, + agent: &str, program: &str, args: &[String], working_dir: Option, @@ -322,7 +342,10 @@ async fn try_spawn_with_pty( let args_v = args.to_vec(); let session_id_clone = session_id.clone(); + let agent_string = agent.to_string(); + tokio::task::spawn_blocking(move || -> Result<(), String> { + let agent_ref = agent_string; let pty_system = native_pty_system(); let pair = pty_system .openpty(PtySize { @@ -357,20 +380,48 @@ async fn try_spawn_with_pty( // Read loop: emit chunks as they arrive let mut buf = [0u8; 4096]; + let mut codex_accumulator = if agent_ref.eq_ignore_ascii_case("codex") { + Some(CodexStreamAccumulator::new()) + } else { + None + }; + loop { match std::io::Read::read(&mut reader, &mut buf) { Ok(0) => break, // EOF Ok(n) => { let text = String::from_utf8_lossy(&buf[..n]).to_string(); - // Emit synchronously — safe on main thread; tauri queues it. 
- let _ = app_clone.emit( - "cli-stream", - StreamChunk { - session_id: session_id_clone.clone(), - content: text, - finished: false, - }, - ); + if let Some(acc) = codex_accumulator.as_mut() { + for segment in acc.push_chunk(&text) { + if let Some(filtered) = sanitize_cli_output_line(&agent_ref, &segment) { + let _ = app_clone.emit( + "cli-stream", + StreamChunk { + session_id: session_id_clone.clone(), + content: filtered, + finished: false, + }, + ); + } + } + } else { + for line in text.split_inclusive(['\n', '\r']) { + let trimmed = line.trim_end_matches(['\n', '\r']); + if trimmed.is_empty() { + continue; + } + if let Some(filtered) = sanitize_cli_output_line(&agent_ref, trimmed) { + let _ = app_clone.emit( + "cli-stream", + StreamChunk { + session_id: session_id_clone.clone(), + content: format!("{}\n", filtered), + finished: false, + }, + ); + } + } + } } Err(e) => { let _ = app_clone.emit( @@ -390,6 +441,20 @@ async fn try_spawn_with_pty( let status = child .wait() .map_err(|e| format!("Failed to wait on PTY child: {}", e))?; + if let Some(mut acc) = codex_accumulator { + if let Some(remaining) = acc.flush() { + if let Some(filtered) = sanitize_cli_output_line(&agent_ref, &remaining) { + let _ = app_clone.emit( + "cli-stream", + StreamChunk { + session_id: session_id_clone.clone(), + content: filtered, + finished: false, + }, + ); + } + } + } let _ = app_clone.emit( "cli-stream", StreamChunk { @@ -410,6 +475,202 @@ async fn try_spawn_with_pty( Ok(()) } +#[derive(Debug, Serialize)] +struct CodexSdkInvocation { + #[serde(rename = "sessionId")] + session_id: String, + prompt: String, + #[serde(rename = "workingDirectory", skip_serializing_if = "Option::is_none")] + working_directory: Option, + #[serde(rename = "sandboxMode", skip_serializing_if = "Option::is_none")] + sandbox_mode: Option, + #[serde(rename = "model", skip_serializing_if = "Option::is_none")] + model: Option, + #[serde(rename = "skipGitRepoCheck")] + skip_git_repo_check: bool, +} + 
+#[derive(Debug, Deserialize)] +struct CodexSdkBridgeMessage { + #[serde(rename = "sessionId")] + session_id: Option, + content: Option, + error: Option, + #[serde(default)] + finished: bool, +} + +fn resolve_codex_runner_path() -> Result { + match CODEX_SDK_RUNNER_PATH.as_ref() { + Ok(path) => Ok(path.clone()), + Err(err) => Err(err.clone()), + } +} + +async fn try_spawn_codex_sdk( + app: tauri::AppHandle, + session_id: String, + prompt: String, + working_dir: Option, + prefs: CodexThreadPreferences, + model: Option, +) -> Result<(), String> { + let script_path = resolve_codex_runner_path()?; + + let mut cmd = Command::new("node"); + cmd.arg( + script_path + .to_str() + .ok_or_else(|| "Invalid script path".to_string())?, + ) + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()); + + let node_modules_dir = Path::new(env!("CARGO_MANIFEST_DIR")).join("../node_modules"); + if let Ok(canonical) = fs::canonicalize(&node_modules_dir) { + cmd.env("NODE_PATH", &canonical); + + let sdk_dist_path = canonical.join("@openai/codex-sdk/dist/index.js"); + if sdk_dist_path.exists() { + cmd.env("CODEX_SDK_DIST_PATH", sdk_dist_path); + } + } + + if let Some(dir) = &working_dir { + cmd.current_dir(dir); + } + + let mut child = cmd + .spawn() + .map_err(|e| format!("Failed to spawn Codex SDK runner: {}", e))?; + + let config = CodexSdkInvocation { + session_id: session_id.clone(), + prompt, + working_directory: working_dir.clone(), + sandbox_mode: prefs.sandbox_mode.clone(), + model, + skip_git_repo_check: prefs.skip_git_repo_check, + }; + + if let Some(mut stdin) = child.stdin.take() { + let payload = serde_json::to_string(&config) + .map_err(|e| format!("Failed to serialize Codex SDK config: {}", e))?; + tokio::spawn(async move { + let _ = stdin.write_all(payload.as_bytes()).await; + let _ = stdin.shutdown().await; + }); + } + + if let Some(stdout) = child.stdout.take() { + let app_for_stdout = app.clone(); + let session_for_stdout = session_id.clone(); + 
tokio::spawn(async move { + let reader = BufReader::new(stdout); + let mut lines = reader.lines(); + + while let Ok(Some(line)) = lines.next_line().await { + if line.trim().is_empty() { + continue; + } + + let parsed: Result = serde_json::from_str(&line); + match parsed { + Ok(msg) => { + let sid = msg.session_id.unwrap_or_else(|| session_for_stdout.clone()); + + if let Some(error) = msg.error { + let chunk = StreamChunk { + session_id: sid, + content: format!("❌ Codex error: {}\n", error), + finished: msg.finished, + }; + let _ = app_for_stdout.emit("cli-stream", chunk); + } else if let Some(content) = msg.content { + let chunk = StreamChunk { + session_id: sid, + content, + finished: msg.finished, + }; + let _ = app_for_stdout.emit("cli-stream", chunk); + } + } + Err(_) => { + let chunk = StreamChunk { + session_id: session_for_stdout.clone(), + content: line + "\n", + finished: false, + }; + let _ = app_for_stdout.emit("cli-stream", chunk); + } + } + } + }); + } + + if let Some(stderr) = child.stderr.take() { + let app_for_stderr = app.clone(); + let session_for_stderr = session_id.clone(); + tokio::spawn(async move { + let reader = BufReader::new(stderr); + let mut lines = reader.lines(); + + while let Ok(Some(line)) = lines.next_line().await { + if line.trim().is_empty() { + continue; + } + let parsed: Result = serde_json::from_str(&line); + match parsed { + Ok(msg) => { + let sid = msg.session_id.unwrap_or_else(|| session_for_stderr.clone()); + if let Some(error) = msg.error { + let chunk = StreamChunk { + session_id: sid, + content: format!("❌ Codex error: {}\n", error), + finished: msg.finished, + }; + let _ = app_for_stderr.emit("cli-stream", chunk); + } + } + Err(_) => { + // Ignore non-JSON stderr lines (debug logging from SDK runner) + } + } + } + }); + } + + match child.wait().await { + Ok(status) => { + if status.success() { + let _ = app.emit( + "cli-stream", + StreamChunk { + session_id, + content: "\n✅ Command completed successfully\n".to_string(), 
+ finished: true, + }, + ); + } else { + let _ = app.emit( + "cli-stream", + StreamChunk { + session_id, + content: format!( + "\n❌ Codex SDK runner exited with status {}\n", + status.code().unwrap_or(-1) + ), + finished: true, + }, + ); + } + Ok(()) + } + Err(e) => Err(format!("Failed to wait for Codex SDK runner: {}", e)), + } +} + #[tauri::command] pub async fn execute_persistent_cli_command( app: tauri::AppHandle, @@ -421,15 +682,18 @@ pub async fn execute_persistent_cli_command( dangerousBypass: Option, permissionMode: Option, ) -> Result<(), String> { - println!("🔍 BACKEND RECEIVED - Agent: {}, Working Dir: {:?}", agent, working_dir); + println!( + "🔍 BACKEND RECEIVED - Agent: {}, Working Dir: {:?}", + agent, working_dir + ); let app_clone = app.clone(); let session_id_clone = session_id.clone(); let _current_time = chrono::Utc::now().timestamp(); - + tokio::spawn(async move { // Parse command structure to handle both "/agent subcommand" and direct subcommands let (agent_name, actual_message) = parse_command_structure(&agent, &message); - + // Emit session status info let info_chunk = StreamChunk { session_id: session_id_clone.clone(), @@ -437,24 +701,71 @@ pub async fn execute_persistent_cli_command( finished: false, }; let _ = app_clone.emit("cli-stream", info_chunk); - + + let dangerous_bypass = dangerousBypass.unwrap_or(false); + + if agent_name.eq_ignore_ascii_case("codex") { + let all_agent_settings = load_all_agent_settings(app_clone.clone()) + .await + .unwrap_or_else(|_| AllAgentSettings { + claude: AgentSettings::default(), + codex: AgentSettings::default(), + gemini: AgentSettings::default(), + max_concurrent_sessions: 10, + }); + + let current_agent_settings = all_agent_settings.codex.clone(); + let parsed_execution_mode = execution_mode.as_deref().and_then(ExecutionMode::from_str); + let prefs = build_codex_thread_prefs(parsed_execution_mode, dangerous_bypass); + let model = current_agent_settings.model.clone(); + + match try_spawn_codex_sdk( + 
app_clone.clone(), + session_id_clone.clone(), + actual_message.clone(), + working_dir.clone(), + prefs, + model, + ) + .await + { + Ok(()) => { + return; + }, + Err(err) => { + let fallback_chunk = StreamChunk { + session_id: session_id_clone.clone(), + content: format!( + "ℹ️ Codex SDK runner unavailable ({}). Falling back to CLI…\n", + err + ), + finished: false, + }; + let _ = app_clone.emit("cli-stream", fallback_chunk); + } + } + } + // Check if command is available if !check_command_available(&agent_name).await { let error_chunk = StreamChunk { session_id: session_id_clone.clone(), - content: format!("❌ Command '{}' not found. Please install it first:\n\n", agent_name), + content: format!( + "❌ Command '{}' not found. Please install it first:\n\n", + agent_name + ), finished: false, }; let _ = app_clone.emit("cli-stream", error_chunk); - + // Provide installation instructions let install_instructions = match agent_name.as_str() { "claude" => "Install Claude CLI: https://docs.anthropic.com/claude/docs/cli\n", - "codex" => "Install GitHub Copilot CLI: https://github.com/features/copilot\n", + "codex" => "Install GitHub Copilot CLI: https://github.com/features/copilot\n", "gemini" => "Install Gemini CLI: https://cloud.google.com/sdk/docs/install\n", _ => "Please check the official documentation for installation instructions.\n", }; - + let instruction_chunk = StreamChunk { session_id: session_id_clone, content: install_instructions.to_string(), @@ -463,27 +774,48 @@ pub async fn execute_persistent_cli_command( let _ = app_clone.emit("cli-stream", instruction_chunk); return; } - + // Build args once - let command_args = build_agent_command_args(&agent_name, &actual_message, &app_clone, execution_mode.clone(), dangerousBypass.unwrap_or(false), permissionMode.clone()).await; + let command_args = build_agent_command_args( + &agent_name, + &actual_message, + &app_clone, + execution_mode.clone(), + dangerous_bypass, + permissionMode.clone(), + ) + .await; // Resolve 
absolute path of the executable to avoid PATH issues in GUI contexts let resolved_prog = which::which(&agent_name) .map(|p| p.to_string_lossy().to_string()) .unwrap_or(agent_name.clone()); - // Decide spawn strategy: - // When a specific working_dir is requested we prefer pipe streaming with explicit current_dir - // for maximum reliability across platforms. Otherwise try PTY first for richer streaming. - // ALWAYS use pipe method when working_dir is specified to ensure directory is respected - if working_dir.is_none() { - if let Err(e) = try_spawn_with_pty(app_clone.clone(), session_id_clone.clone(), &resolved_prog, &command_args, working_dir.clone()).await { + // Prefer PTY for richer streaming – Codex in particular emits carriage-return updates that + // disappear when spawned via plain pipes. `try_spawn_with_pty` respects the working + // directory, so we can safely attempt it regardless of `working_dir`. + let prefer_pty = working_dir.is_none() || agent_name.eq_ignore_ascii_case("codex"); + + if prefer_pty { + if let Err(e) = try_spawn_with_pty( + app_clone.clone(), + session_id_clone.clone(), + &agent_name, + &resolved_prog, + &command_args, + working_dir.clone(), + ) + .await + { // Inform about PTY fallback let _ = app_clone.emit( "cli-stream", StreamChunk { session_id: session_id_clone.clone(), - content: format!("ℹ️ PTY unavailable ({}). Falling back to pipe streaming...\n", e), + content: format!( + "ℹ️ PTY unavailable ({}). 
Falling back to pipe streaming...\n", + e + ), finished: false, }, ); @@ -510,40 +842,150 @@ pub async fn execute_persistent_cli_command( if let Some(stdout) = child.stdout.take() { let app_for_stdout = app_clone.clone(); let session_id_for_stdout = session_id_clone.clone(); + let agent_for_stdout = agent_name.clone(); tokio::spawn(async move { - let reader = BufReader::new(stdout); - let mut lines = reader.lines(); - - while let Ok(Some(line)) = lines.next_line().await { - let chunk = StreamChunk { - session_id: session_id_for_stdout.clone(), - content: line + "\n", - finished: false, - }; - let _ = app_for_stdout.emit("cli-stream", chunk); + if agent_for_stdout.eq_ignore_ascii_case("codex") { + let mut reader = BufReader::new(stdout); + let mut buf = vec![0u8; 4096]; + let mut accumulator = CodexStreamAccumulator::new(); + + loop { + match reader.read(&mut buf).await { + Ok(0) => break, + Ok(n) => { + let text = String::from_utf8_lossy(&buf[..n]); + for segment in accumulator.push_chunk(text.as_ref()) { + if let Some(filtered) = sanitize_cli_output_line( + &agent_for_stdout, + &segment, + ) { + let chunk = StreamChunk { + session_id: session_id_for_stdout.clone(), + content: filtered, + finished: false, + }; + let _ = app_for_stdout.emit("cli-stream", chunk); + } + } + } + Err(e) => { + let chunk = StreamChunk { + session_id: session_id_for_stdout.clone(), + content: format!("ERROR: {}\n", e), + finished: false, + }; + let _ = app_for_stdout.emit("cli-stream", chunk); + break; + } + } + } + + if let Some(remaining) = accumulator.flush() { + if let Some(filtered) = + sanitize_cli_output_line(&agent_for_stdout, &remaining) + { + let chunk = StreamChunk { + session_id: session_id_for_stdout, + content: filtered, + finished: false, + }; + let _ = app_for_stdout.emit("cli-stream", chunk); + } + } + } else { + let reader = BufReader::new(stdout); + let mut lines = reader.lines(); + + while let Ok(Some(line)) = lines.next_line().await { + if let Some(filtered) = + 
sanitize_cli_output_line(&agent_for_stdout, &line) + { + let chunk = StreamChunk { + session_id: session_id_for_stdout.clone(), + content: filtered + "\n", + finished: false, + }; + let _ = app_for_stdout.emit("cli-stream", chunk); + } + } } }); } - + // Stream stderr if let Some(stderr) = child.stderr.take() { let app_for_stderr = app_clone.clone(); let session_id_for_stderr = session_id_clone.clone(); + let agent_for_stderr = agent_name.clone(); tokio::spawn(async move { - let reader = BufReader::new(stderr); - let mut lines = reader.lines(); - - while let Ok(Some(line)) = lines.next_line().await { - let chunk = StreamChunk { - session_id: session_id_for_stderr.clone(), - content: format!("ERROR: {}\n", line), - finished: false, - }; - let _ = app_for_stderr.emit("cli-stream", chunk); + if agent_for_stderr.eq_ignore_ascii_case("codex") { + let mut reader = BufReader::new(stderr); + let mut buf = vec![0u8; 4096]; + let mut accumulator = CodexStreamAccumulator::new(); + + loop { + match reader.read(&mut buf).await { + Ok(0) => break, + Ok(n) => { + let text = String::from_utf8_lossy(&buf[..n]); + for segment in accumulator.push_chunk(text.as_ref()) { + if let Some(filtered) = sanitize_cli_output_line( + &agent_for_stderr, + &segment, + ) { + let chunk = StreamChunk { + session_id: session_id_for_stderr.clone(), + content: format!("ERROR: {}\n", filtered), + finished: false, + }; + let _ = app_for_stderr.emit("cli-stream", chunk); + } + } + } + Err(e) => { + let chunk = StreamChunk { + session_id: session_id_for_stderr.clone(), + content: format!("ERROR: {}\n", e), + finished: false, + }; + let _ = app_for_stderr.emit("cli-stream", chunk); + break; + } + } + } + + if let Some(remaining) = accumulator.flush() { + if let Some(filtered) = + sanitize_cli_output_line(&agent_for_stderr, &remaining) + { + let chunk = StreamChunk { + session_id: session_id_for_stderr, + content: format!("ERROR: {}\n", filtered), + finished: false, + }; + let _ = 
app_for_stderr.emit("cli-stream", chunk); + } + } + } else { + let reader = BufReader::new(stderr); + let mut lines = reader.lines(); + + while let Ok(Some(line)) = lines.next_line().await { + if let Some(filtered) = + sanitize_cli_output_line(&agent_for_stderr, &line) + { + let chunk = StreamChunk { + session_id: session_id_for_stderr.clone(), + content: format!("ERROR: {}\n", filtered), + finished: false, + }; + let _ = app_for_stderr.emit("cli-stream", chunk); + } + } } }); } - + // Wait for completion match child.wait().await { Ok(status) => { @@ -552,7 +994,10 @@ pub async fn execute_persistent_cli_command( content: if status.success() { "\n✅ Command completed successfully\n".to_string() } else { - format!("\n❌ Command failed with exit code: {}\n", status.code().unwrap_or(-1)) + format!( + "\n❌ Command failed with exit code: {}\n", + status.code().unwrap_or(-1) + ) }, finished: true, }; @@ -574,7 +1019,7 @@ pub async fn execute_persistent_cli_command( } else { format!("Failed to start {}: {}", agent_name, e) }; - + let error_chunk = StreamChunk { session_id: session_id_clone.clone(), content: format!("❌ {}\n", error_message), @@ -585,7 +1030,7 @@ pub async fn execute_persistent_cli_command( } } }); - + Ok(()) } @@ -602,62 +1047,94 @@ pub async fn execute_cli_command( ) -> Result<(), String> { // Legacy function - redirect to persistent session handler let message = args.join(" "); - execute_persistent_cli_command(app, session_id, command, message, working_dir, execution_mode, dangerousBypass, permissionMode).await + execute_persistent_cli_command( + app, + session_id, + command, + message, + working_dir, + execution_mode, + dangerousBypass, + permissionMode, + ) + .await } #[tauri::command] pub async fn execute_claude_command( app: tauri::AppHandle, - #[allow(non_snake_case)] - sessionId: String, + #[allow(non_snake_case)] sessionId: String, message: String, - #[allow(non_snake_case)] - working_dir: Option, + #[allow(non_snake_case)] workingDir: Option, ) -> 
Result<(), String> { - execute_persistent_cli_command(app, sessionId, "claude".to_string(), message, working_dir, None, None, None).await + execute_persistent_cli_command( + app, + sessionId, + "claude".to_string(), + message, + workingDir, + None, + None, + None, + ) + .await } #[tauri::command] pub async fn execute_codex_command( app: tauri::AppHandle, - #[allow(non_snake_case)] - sessionId: String, + #[allow(non_snake_case)] sessionId: String, message: String, - #[allow(non_snake_case)] - working_dir: Option, + #[allow(non_snake_case)] workingDir: Option, executionMode: Option, dangerousBypass: Option, permissionMode: Option, ) -> Result<(), String> { - execute_persistent_cli_command(app, sessionId, "codex".to_string(), message, working_dir, executionMode, dangerousBypass, permissionMode).await + execute_persistent_cli_command( + app, + sessionId, + "codex".to_string(), + message, + workingDir, + executionMode, + dangerousBypass, + permissionMode, + ) + .await } #[tauri::command] pub async fn execute_gemini_command( app: tauri::AppHandle, - #[allow(non_snake_case)] - sessionId: String, + #[allow(non_snake_case)] sessionId: String, message: String, - #[allow(non_snake_case)] - working_dir: Option, + #[allow(non_snake_case)] workingDir: Option, ) -> Result<(), String> { - execute_persistent_cli_command(app, sessionId, "gemini".to_string(), message, working_dir, None, None, None).await + execute_persistent_cli_command( + app, + sessionId, + "gemini".to_string(), + message, + workingDir, + None, + None, + None, + ) + .await } // Test command to demonstrate CLI streaming (this will always work) #[tauri::command] pub async fn execute_test_command( app: tauri::AppHandle, - #[allow(non_snake_case)] - sessionId: String, + #[allow(non_snake_case)] sessionId: String, message: String, - #[allow(non_snake_case)] - working_dir: Option, + #[allow(non_snake_case)] workingDir: Option, ) -> Result<(), String> { let app_clone = app.clone(); let session_id_clone = 
sessionId.clone(); - let _ = working_dir; // currently unused - + let _ = workingDir; // currently unused + tokio::spawn(async move { // Simulate streaming response for testing let user_message = format!("💭 You said: {}", message); @@ -668,10 +1145,10 @@ pub async fn execute_test_command( "✅ CLI streaming is working correctly!".to_string(), "🚀 All systems operational.".to_string(), ]; - + for (i, line) in lines.iter().enumerate() { tokio::time::sleep(tokio::time::Duration::from_millis(500)).await; - + let chunk = StreamChunk { session_id: session_id_clone.clone(), content: format!("{}\n", line), @@ -680,7 +1157,7 @@ pub async fn execute_test_command( let _ = app_clone.emit("cli-stream", chunk); } }); - + Ok(()) } @@ -691,12 +1168,12 @@ pub async fn cleanup_cli_sessions() -> Result<(), String> { pub async fn get_sessions_status() -> Result { let sessions = SESSIONS.lock().await; - + let active_sessions: Vec = sessions .values() .map(|session| session.session.clone()) .collect(); - + Ok(SessionStatus { active_sessions: active_sessions.clone(), total_sessions: active_sessions.len(), @@ -712,21 +1189,22 @@ pub async fn terminate_all_active_sessions() -> Result<(), String> { let sessions = SESSIONS.lock().await; sessions.keys().cloned().collect() }; - + for session_id in session_ids { let _ = terminate_session_process(&session_id).await; } - + Ok(()) } pub async fn send_quit_to_session(session_id: &str) -> Result<(), String> { let sessions = SESSIONS.lock().await; - + if let Some(session) = sessions.get(session_id) { if let Some(ref sender) = session.stdin_sender { let quit_cmd = get_agent_quit_command(&session.session.agent); - sender.send(format!("{}\n", quit_cmd)) + sender + .send(format!("{}\n", quit_cmd)) .map_err(|e| format!("Failed to send quit command: {}", e))?; } else { return Err("Session stdin not available".to_string()); @@ -734,6 +1212,42 @@ pub async fn send_quit_to_session(session_id: &str) -> Result<(), String> { } else { return Err("Session not 
found".to_string()); } - + + Ok(()) +} + +#[tauri::command] +pub fn open_file_in_editor(file_path: String) -> Result<(), String> { + let path = Path::new(&file_path); + + if !path.exists() { + return Err(format!("File does not exist: {}", file_path)); + } + + #[cfg(target_os = "macos")] + { + StdCommand::new("open") + .arg("-t") + .arg(file_path) + .spawn() + .map_err(|e| format!("Failed to open file: {}", e))?; + } + + #[cfg(target_os = "windows")] + { + StdCommand::new("cmd") + .args(&["/C", "start", "", &file_path]) + .spawn() + .map_err(|e| format!("Failed to open file: {}", e))?; + } + + #[cfg(target_os = "linux")] + { + StdCommand::new("xdg-open") + .arg(file_path) + .spawn() + .map_err(|e| format!("Failed to open file: {}", e))?; + } + Ok(()) } diff --git a/src-tauri/src/commands/file_commands.rs b/src-tauri/src/commands/file_commands.rs index 0a9ac16..06b570c 100644 --- a/src-tauri/src/commands/file_commands.rs +++ b/src-tauri/src/commands/file_commands.rs @@ -1,6 +1,6 @@ -use std::path::{Path, PathBuf}; -use std::fs; use std::env; +use std::fs; +use std::path::{Path, PathBuf}; use crate::models::*; use crate::services::file_service; @@ -10,10 +10,12 @@ fn is_valid_file_extension(path: &Path, allowed_extensions: &[&str]) -> bool { if allowed_extensions.is_empty() { return true; // No filtering if no extensions specified } - + if let Some(ext) = path.extension() { if let Some(ext_str) = ext.to_str() { - return allowed_extensions.iter().any(|&allowed| allowed.eq_ignore_ascii_case(ext_str)); + return allowed_extensions + .iter() + .any(|&allowed| allowed.eq_ignore_ascii_case(ext_str)); } } false @@ -21,14 +23,27 @@ fn is_valid_file_extension(path: &Path, allowed_extensions: &[&str]) -> bool { fn should_skip_directory(dir_name: &str) -> bool { // Skip common directories that shouldn't be indexed for file mentions - matches!(dir_name, - ".git" | ".svn" | ".hg" | - "node_modules" | ".next" | ".nuxt" | "dist" | "build" | "out" | - "target" | "Cargo.lock" | - 
".vscode" | ".idea" | - "__pycache__" | ".pytest_cache" | - ".DS_Store" | "Thumbs.db" | - "coverage" | ".nyc_output" + matches!( + dir_name, + ".git" + | ".svn" + | ".hg" + | "node_modules" + | ".next" + | ".nuxt" + | "dist" + | "build" + | "out" + | "target" + | "Cargo.lock" + | ".vscode" + | ".idea" + | "__pycache__" + | ".pytest_cache" + | ".DS_Store" + | "Thumbs.db" + | "coverage" + | ".nyc_output" ) } @@ -44,14 +59,13 @@ fn collect_files_recursive( } let mut files = Vec::new(); - + let entries = fs::read_dir(dir_path) .map_err(|e| format!("Failed to read directory {}: {}", dir_path.display(), e))?; for entry in entries { - let entry = entry - .map_err(|e| format!("Failed to process directory entry: {}", e))?; - + let entry = entry.map_err(|e| format!("Failed to process directory entry: {}", e))?; + let entry_path = entry.path(); let file_name = entry.file_name(); let file_name_str = file_name.to_string_lossy().to_string(); @@ -66,17 +80,16 @@ fn collect_files_recursive( if should_skip_directory(&file_name_str) { continue; } - + // Recursively collect files from subdirectories let mut subdir_files = collect_files_recursive( - &entry_path, - base_path, - allowed_extensions, - max_depth, - current_depth + 1 + &entry_path, + base_path, + allowed_extensions, + max_depth, + current_depth + 1, )?; files.append(&mut subdir_files); - } else if entry_path.is_file() { // Check if file has allowed extension if is_valid_file_extension(&entry_path, allowed_extensions) { @@ -109,22 +122,22 @@ fn collect_files_recursive( pub async fn get_current_working_directory() -> Result { let current_dir = env::current_dir() .map_err(|e| format!("Failed to get current working directory: {}", e))?; - + Ok(current_dir.to_string_lossy().to_string()) } #[tauri::command] pub async fn set_current_working_directory(path: String) -> Result<(), String> { let path = Path::new(&path); - + if !path.exists() { return Err(format!("Directory does not exist: {}", path.display())); } - + if 
!path.is_dir() { return Err(format!("Path is not a directory: {}", path.display())); } - + env::set_current_dir(path) .map_err(|e| format!("Failed to set current working directory: {}", e)) } @@ -141,15 +154,15 @@ pub async fn list_files_in_directory( None => env::current_dir() .map_err(|e| format!("Failed to get current working directory: {}", e))?, }; - + if !base_path.exists() { return Err(format!("Directory does not exist: {}", base_path.display())); } - + if !base_path.is_dir() { return Err(format!("Path is not a directory: {}", base_path.display())); } - + // Set reasonable defaults let max_depth = max_depth.unwrap_or(5); // Max 5 levels deep let allowed_extensions: Vec<&str> = extensions @@ -158,20 +171,20 @@ pub async fn list_files_in_directory( .unwrap_or_else(|| { // Default to common code file extensions vec![ - "rs", "js", "ts", "tsx", "jsx", "py", "java", "c", "cpp", "h", "hpp", - "go", "php", "rb", "swift", "kt", "cs", "dart", "vue", "svelte", - "html", "css", "scss", "sass", "less", "md", "txt", "json", "yaml", "yml", - "toml", "xml", "sql", "sh", "bash", "zsh", "fish", "ps1", "bat", "cmd" + "rs", "js", "ts", "tsx", "jsx", "py", "java", "c", "cpp", "h", "hpp", "go", "php", + "rb", "swift", "kt", "cs", "dart", "vue", "svelte", "html", "css", "scss", "sass", + "less", "md", "txt", "json", "yaml", "yml", "toml", "xml", "sql", "sh", "bash", + "zsh", "fish", "ps1", "bat", "cmd", ] }); - + // Collect files recursively let files = collect_files_recursive(&base_path, &base_path, &allowed_extensions, max_depth, 0)?; - + // Sort files by relative path for consistent ordering let mut sorted_files = files; sorted_files.sort_by(|a, b| a.relative_path.cmp(&b.relative_path)); - + Ok(DirectoryListing { current_directory: base_path.to_string_lossy().to_string(), files: sorted_files, @@ -188,21 +201,21 @@ pub async fn search_files_by_name( if search_term.trim().is_empty() { return Err("Search term cannot be empty".to_string()); } - + // Get all files first let listing 
= list_files_in_directory(directory_path, extensions, max_depth).await?; - + // Filter by search term (case-insensitive) let search_lower = search_term.to_lowercase(); let filtered_files: Vec = listing .files .into_iter() .filter(|file| { - file.name.to_lowercase().contains(&search_lower) || - file.relative_path.to_lowercase().contains(&search_lower) + file.name.to_lowercase().contains(&search_lower) + || file.relative_path.to_lowercase().contains(&search_lower) }) .collect(); - + Ok(DirectoryListing { current_directory: listing.current_directory, files: filtered_files, @@ -212,11 +225,11 @@ pub async fn search_files_by_name( #[tauri::command] pub async fn get_file_info(file_path: String) -> Result, String> { let path = Path::new(&file_path); - + if !path.exists() { return Ok(None); } - + // Get the parent directory to create relative path let parent = path.parent().unwrap_or(Path::new("")); let relative_path = path @@ -224,14 +237,15 @@ pub async fn get_file_info(file_path: String) -> Result, String .unwrap_or(path) .to_string_lossy() .to_string(); - + let extension = path .extension() .and_then(|ext| ext.to_str()) .map(|s| s.to_string()); - + Ok(Some(FileInfo { - name: path.file_name() + name: path + .file_name() .map(|n| n.to_string_lossy().to_string()) .unwrap_or_else(|| "Unknown".to_string()), path: path.to_string_lossy().to_string(), diff --git a/src-tauri/src/commands/git_commands.rs b/src-tauri/src/commands/git_commands.rs index a1d7f6e..0c76860 100644 --- a/src-tauri/src/commands/git_commands.rs +++ b/src-tauri/src/commands/git_commands.rs @@ -1,26 +1,26 @@ +use crate::services::git_service; use std::collections::HashMap; -use tauri::Emitter; use std::path::Path; -use crate::services::git_service; use std::path::PathBuf; +use tauri::Emitter; #[tauri::command] pub async fn validate_git_repository_url(url: String) -> Result { use std::process::Stdio; - + // Validate that git is available let git_check = tokio::process::Command::new("git") .arg("--version") 
.output() .await; - + match git_check { Ok(output) if !output.status.success() => { return Err("Git is not installed or not available in PATH".to_string()); - }, + } Err(_) => { return Err("Git is not installed or not available in PATH".to_string()); - }, + } _ => {} } @@ -45,13 +45,13 @@ pub async fn validate_git_repository_url(url: String) -> Result { #[tauri::command] pub async fn clone_repository( app: tauri::AppHandle, - url: String, - destination: String + url: String, + destination: String, ) -> Result { - use tokio::process::Command; - use tokio::io::{AsyncBufReadExt, BufReader}; use std::process::Stdio; - + use tokio::io::{AsyncBufReadExt, BufReader}; + use tokio::process::Command; + // Create parent directory if it doesn't exist if let Some(parent) = std::path::Path::new(&destination).parent() { if let Err(e) = std::fs::create_dir_all(parent) { @@ -72,7 +72,7 @@ pub async fn clone_repository( if let Some(stderr) = child.stderr.take() { let reader = BufReader::new(stderr); let mut lines = reader.lines(); - + while let Some(line) = lines.next_line().await.unwrap_or(None) { // Emit progress to frontend let _ = app.emit("clone-progress", line.clone()); @@ -80,7 +80,9 @@ pub async fn clone_repository( } // Wait for the process to complete - let status = child.wait().await + let status = child + .wait() + .await .map_err(|e| format!("Failed to wait for git clone: {}", e))?; if !status.success() { @@ -184,7 +186,8 @@ pub async fn get_git_worktree_enabled() -> Result { #[tauri::command] pub async fn get_git_worktree_preference(app: tauri::AppHandle) -> Result { use tauri_plugin_store::StoreExt; - let store = app.store("app-settings.json") + let store = app + .store("app-settings.json") .map_err(|e| format!("Failed to access store: {}", e))?; let value = store.get("git_worktree_enabled").and_then(|v| v.as_bool()); Ok(value.unwrap_or(true)) @@ -193,16 +196,18 @@ pub async fn get_git_worktree_preference(app: tauri::AppHandle) -> Result Result<(), String> { use 
tauri_plugin_store::StoreExt; - + // Save the workspace (git worktree) preference to app settings - let store = app.store("app-settings.json") + let store = app + .store("app-settings.json") .map_err(|e| format!("Failed to access store: {}", e))?; - + store.set("git_worktree_enabled", serde_json::Value::Bool(enabled)); - - store.save() + + store + .save() .map_err(|e| format!("Failed to persist git worktree setting: {}", e))?; - + Ok(()) } @@ -256,23 +261,23 @@ pub fn is_valid_git_repository(path: &Path) -> bool { #[tauri::command] pub async fn validate_git_repository(project_path: String) -> Result { let path = Path::new(&project_path); - + if !path.exists() || !path.is_dir() { return Ok(false); } - + Ok(is_valid_git_repository(path)) } #[tauri::command] pub async fn select_git_project_folder(app: tauri::AppHandle) -> Result, String> { - use tauri_plugin_dialog::DialogExt; - use std::sync::{Arc, Mutex}; use std::sync::mpsc; - + use std::sync::{Arc, Mutex}; + use tauri_plugin_dialog::DialogExt; + let (tx, rx) = mpsc::channel(); let tx = Arc::new(Mutex::new(Some(tx))); - + app.dialog() .file() .set_title("Open Git Project") @@ -284,41 +289,46 @@ pub async fn select_git_project_folder(app: tauri::AppHandle) -> Result { let path_str = match path { tauri_plugin_dialog::FilePath::Path(p) => p.to_string_lossy().to_string(), tauri_plugin_dialog::FilePath::Url(u) => u.to_string(), }; - + // Validate that the selected folder is a git repository let selected_path = Path::new(&path_str); if !is_valid_git_repository(selected_path) { return Err("Selected folder is not a valid git repository. 
Please select a folder containing a .git directory.".to_string()); } - + Ok(Some(path_str)) - }, + } Ok(None) => { // User cancelled Ok(None) - }, - Err(_) => { - Err("Failed to receive folder selection result".to_string()) - }, + } + Err(_) => Err("Failed to receive folder selection result".to_string()), } } #[tauri::command] -pub async fn create_workspace_worktree(app: tauri::AppHandle, project_path: String, name: String) -> Result { +pub async fn create_workspace_worktree( + app: tauri::AppHandle, + project_path: String, + name: String, +) -> Result { // Ensure valid repo let repo = PathBuf::from(&project_path); - if !is_valid_git_repository(&repo) { return Err("Not a valid git repository".to_string()); } + if !is_valid_git_repository(&repo) { + return Err("Not a valid git repository".to_string()); + } // Ensure .commander directory let commander_dir = repo.join(".commander"); - std::fs::create_dir_all(&commander_dir).map_err(|e| format!("Failed to create .commander: {}", e))?; + std::fs::create_dir_all(&commander_dir) + .map_err(|e| format!("Failed to create .commander: {}", e))?; // Generate branch name let branch = format!("workspace/{}", name); @@ -326,41 +336,77 @@ pub async fn create_workspace_worktree(app: tauri::AppHandle, project_path: Stri // Create worktree on new branch let status = tokio::process::Command::new("git") - .arg("-C").arg(&project_path) - .args(["worktree","add","-B", &branch, target_path.to_string_lossy().as_ref()]) - .output().await.map_err(|e| format!("git worktree add failed: {}", e))?; + .arg("-C") + .arg(&project_path) + .args([ + "worktree", + "add", + "-B", + &branch, + target_path.to_string_lossy().as_ref(), + ]) + .output() + .await + .map_err(|e| format!("git worktree add failed: {}", e))?; if !status.status.success() { - return Err(format!("Failed to add worktree: {}", String::from_utf8_lossy(&status.stderr))); + return Err(format!( + "Failed to add worktree: {}", + String::from_utf8_lossy(&status.stderr) + )); } 
Ok(target_path.to_string_lossy().to_string()) } #[tauri::command] -pub async fn remove_workspace_worktree(project_path: String, worktree_path: String) -> Result<(), String> { +pub async fn remove_workspace_worktree( + project_path: String, + worktree_path: String, +) -> Result<(), String> { // Remove worktree (prunes checked-out tree) let status = tokio::process::Command::new("git") - .arg("-C").arg(&project_path) - .args(["worktree","remove","--force", &worktree_path]) - .output().await.map_err(|e| format!("git worktree remove failed: {}", e))?; + .arg("-C") + .arg(&project_path) + .args(["worktree", "remove", "--force", &worktree_path]) + .output() + .await + .map_err(|e| format!("git worktree remove failed: {}", e))?; if !status.status.success() { - return Err(format!("Failed to remove worktree: {}", String::from_utf8_lossy(&status.stderr))); + return Err(format!( + "Failed to remove worktree: {}", + String::from_utf8_lossy(&status.stderr) + )); } Ok(()) } #[tauri::command] -pub async fn get_git_log(project_path: String, limit: Option) -> Result>, String> { +pub async fn get_git_log( + project_path: String, + limit: Option, +) -> Result>, String> { let lim = limit.unwrap_or(50).to_string(); let output = tokio::process::Command::new("git") - .arg("-C").arg(&project_path) - .args(["log", "--pretty=%H|%an|%ad|%s", "--date=iso", &format!("-n{}", lim)]) - .output().await.map_err(|e| format!("Failed to run git log: {}", e))?; - if !output.status.success() { return Err(String::from_utf8_lossy(&output.stderr).to_string()); } + .arg("-C") + .arg(&project_path) + .args([ + "log", + "--pretty=%H|%an|%ad|%s", + "--date=iso", + &format!("-n{}", lim), + ]) + .output() + .await + .map_err(|e| format!("Failed to run git log: {}", e))?; + if !output.status.success() { + return Err(String::from_utf8_lossy(&output.stderr).to_string()); + } let stdout = String::from_utf8_lossy(&output.stdout); let mut rows = Vec::new(); for line in stdout.lines() { - if line.trim().is_empty() { 
continue; } + if line.trim().is_empty() { + continue; + } let parts: Vec<&str> = line.splitn(4, '|').collect(); if parts.len() == 4 { let mut m = std::collections::HashMap::new(); @@ -376,25 +422,40 @@ pub async fn get_git_log(project_path: String, limit: Option) -> Result Result { let output = tokio::process::Command::new("git") - .arg("-C").arg(worktree_path) - .args(["rev-parse", "--abbrev-ref", "HEAD"]) - .output().await.map_err(|e| format!("Failed to get branch: {}", e))?; - if !output.status.success() { return Err(String::from_utf8_lossy(&output.stderr).to_string()); } + .arg("-C") + .arg(worktree_path) + .args(["rev-parse", "--abbrev-ref", "HEAD"]) + .output() + .await + .map_err(|e| format!("Failed to get branch: {}", e))?; + if !output.status.success() { + return Err(String::from_utf8_lossy(&output.stderr).to_string()); + } Ok(String::from_utf8_lossy(&output.stdout).trim().to_string()) } #[tauri::command] -pub async fn diff_workspace_vs_main(project_path: String, worktree_path: String) -> Result>, String> { +pub async fn diff_workspace_vs_main( + project_path: String, + worktree_path: String, +) -> Result>, String> { let branch = get_branch_from_worktree(&worktree_path).await?; let output = tokio::process::Command::new("git") - .arg("-C").arg(&project_path) + .arg("-C") + .arg(&project_path) .args(["diff", "--name-status", "main...", &branch]) - .output().await.map_err(|e| format!("Failed to run git diff: {}", e))?; - if !output.status.success() { return Err(String::from_utf8_lossy(&output.stderr).to_string()); } + .output() + .await + .map_err(|e| format!("Failed to run git diff: {}", e))?; + if !output.status.success() { + return Err(String::from_utf8_lossy(&output.stderr).to_string()); + } let stdout = String::from_utf8_lossy(&output.stdout); let mut rows = Vec::new(); for line in stdout.lines() { - if line.trim().is_empty() { continue; } + if line.trim().is_empty() { + continue; + } let mut parts = line.split_whitespace(); if let (Some(status), 
Some(path)) = (parts.next(), parts.next()) { let mut m = std::collections::HashMap::new(); @@ -407,33 +468,64 @@ pub async fn diff_workspace_vs_main(project_path: String, worktree_path: String) } #[tauri::command] -pub async fn merge_workspace_to_main(project_path: String, worktree_path: String, message: Option) -> Result<(), String> { +pub async fn merge_workspace_to_main( + project_path: String, + worktree_path: String, + message: Option, +) -> Result<(), String> { let branch = get_branch_from_worktree(&worktree_path).await?; // checkout main - let co = tokio::process::Command::new("git").arg("-C").arg(&project_path).args(["checkout","main"]).output().await.map_err(|e| e.to_string())?; - if !co.status.success() { return Err(String::from_utf8_lossy(&co.stderr).to_string()); } + let co = tokio::process::Command::new("git") + .arg("-C") + .arg(&project_path) + .args(["checkout", "main"]) + .output() + .await + .map_err(|e| e.to_string())?; + if !co.status.success() { + return Err(String::from_utf8_lossy(&co.stderr).to_string()); + } // merge let msg = message.unwrap_or_else(|| format!("Merge workspace {} into main", branch)); let merge = tokio::process::Command::new("git") - .arg("-C").arg(&project_path) - .args(["merge","--no-ff","-m", &msg, &branch]) - .output().await.map_err(|e| e.to_string())?; - if !merge.status.success() { return Err(String::from_utf8_lossy(&merge.stderr).to_string()); } + .arg("-C") + .arg(&project_path) + .args(["merge", "--no-ff", "-m", &msg, &branch]) + .output() + .await + .map_err(|e| e.to_string())?; + if !merge.status.success() { + return Err(String::from_utf8_lossy(&merge.stderr).to_string()); + } Ok(()) } #[tauri::command] -pub async fn diff_workspace_file(project_path: String, worktree_path: String, file_path: String) -> Result { +pub async fn diff_workspace_file( + project_path: String, + worktree_path: String, + file_path: String, +) -> Result { let branch = get_branch_from_worktree(&worktree_path).await?; let output = 
tokio::process::Command::new("git") - .arg("-C").arg(&project_path) - .args(["diff", "-U200", &format!("main...{}", branch), "--", &file_path]) - .output().await.map_err(|e| format!("Failed to run git diff file: {}", e))?; - if !output.status.success() { return Err(String::from_utf8_lossy(&output.stderr).to_string()); } + .arg("-C") + .arg(&project_path) + .args([ + "diff", + "-U200", + &format!("main...{}", branch), + "--", + &file_path, + ]) + .output() + .await + .map_err(|e| format!("Failed to run git diff file: {}", e))?; + if !output.status.success() { + return Err(String::from_utf8_lossy(&output.stderr).to_string()); + } Ok(String::from_utf8_lossy(&output.stdout).to_string()) } - #[derive(serde::Serialize)] pub struct CommitDagRow { pub hash: String, @@ -448,14 +540,21 @@ pub struct CommitDagRow { pub async fn get_git_branches(project_path: String) -> Result, String> { // List local branches (short names) let output = tokio::process::Command::new("git") - .arg("-C").arg(&project_path) + .arg("-C") + .arg(&project_path) .args(["for-each-ref", "--format=%(refname:short)", "refs/heads"]) - .output().await.map_err(|e| format!("Failed to list branches: {}", e))?; + .output() + .await + .map_err(|e| format!("Failed to list branches: {}", e))?; if !output.status.success() { return Err(String::from_utf8_lossy(&output.stderr).to_string()); } let stdout = String::from_utf8_lossy(&output.stdout); - let mut branches: Vec = stdout.lines().map(|s| s.trim().to_string()).filter(|s| !s.is_empty()).collect(); + let mut branches: Vec = stdout + .lines() + .map(|s| s.trim().to_string()) + .filter(|s| !s.is_empty()) + .collect(); // Ensure unique and stable order (main first if present) branches.sort(); if let Some(pos) = branches.iter().position(|b| b == "main") { @@ -466,11 +565,16 @@ pub async fn get_git_branches(project_path: String) -> Result, Strin } #[tauri::command] -pub async fn get_git_commit_dag(project_path: String, limit: Option, branch: Option) -> Result, String> { 
+pub async fn get_git_commit_dag( + project_path: String, + limit: Option, + branch: Option, +) -> Result, String> { let lim = limit.unwrap_or(50).to_string(); let format = "%H|%P|%an|%ad|%s||%D"; let mut cmd = tokio::process::Command::new("git"); - cmd.arg("-C").arg(&project_path) + cmd.arg("-C") + .arg(&project_path) .arg("log") .arg("--date=iso") .arg(&format!("-n{}", lim)) @@ -480,33 +584,65 @@ pub async fn get_git_commit_dag(project_path: String, limit: Option, bran cmd.arg(b); } } - let output = cmd.output().await.map_err(|e| format!("Failed to run git log: {}", e))?; - if !output.status.success() { return Err(String::from_utf8_lossy(&output.stderr).to_string()); } + let output = cmd + .output() + .await + .map_err(|e| format!("Failed to run git log: {}", e))?; + if !output.status.success() { + return Err(String::from_utf8_lossy(&output.stderr).to_string()); + } let stdout = String::from_utf8_lossy(&output.stdout); let mut rows = Vec::new(); for line in stdout.lines() { - if line.trim().is_empty() { continue; } + if line.trim().is_empty() { + continue; + } let parts: Vec<&str> = line.splitn(6, '|').collect(); - if parts.len() < 6 { continue; } + if parts.len() < 6 { + continue; + } let hash = parts[0].to_string(); - let parents = if parts[1].trim().is_empty() { vec![] } else { parts[1].split_whitespace().map(|s| s.to_string()).collect() }; + let parents = if parts[1].trim().is_empty() { + vec![] + } else { + parts[1].split_whitespace().map(|s| s.to_string()).collect() + }; let author = parts[2].to_string(); let date = parts[3].to_string(); let subject = parts[4].to_string(); let refs_str = parts[5]; - let refs: Vec = refs_str.split(',').map(|s| s.trim().to_string()).filter(|s| !s.is_empty()).collect(); - rows.push(CommitDagRow { hash, parents, author, date, subject, refs }); + let refs: Vec = refs_str + .split(',') + .map(|s| s.trim().to_string()) + .filter(|s| !s.is_empty()) + .collect(); + rows.push(CommitDagRow { + hash, + parents, + author, + date, + 
subject, + refs, + }); } Ok(rows) } #[tauri::command] -pub async fn get_commit_diff_files(project_path: String, commit_hash: String) -> Result>, String> { +pub async fn get_commit_diff_files( + project_path: String, + commit_hash: String, +) -> Result>, String> { let output = tokio::process::Command::new("git") - .arg("-C").arg(&project_path) + .arg("-C") + .arg(&project_path) .args(["show", "--name-status", "--format=tformat:", &commit_hash]) - .output().await.map_err(|e| format!("Failed to run git show: {}", e))?; - if !output.status.success() { return Err(String::from_utf8_lossy(&output.stderr).to_string()); } + .output() + .await + .map_err(|e| format!("Failed to run git show: {}", e))?; + if !output.status.success() { + return Err(String::from_utf8_lossy(&output.stderr).to_string()); + } let stdout = String::from_utf8_lossy(&output.stdout); let mut rows = Vec::new(); for line in stdout.lines() { @@ -522,29 +658,46 @@ pub async fn get_commit_diff_files(project_path: String, commit_hash: String) -> } #[tauri::command] -pub async fn get_commit_diff_text(project_path: String, commit_hash: String, file_path: String) -> Result { +pub async fn get_commit_diff_text( + project_path: String, + commit_hash: String, + file_path: String, +) -> Result { let output = tokio::process::Command::new("git") - .arg("-C").arg(&project_path) + .arg("-C") + .arg(&project_path) .args(["show", &commit_hash, "-U200", "--", &file_path]) - .output().await.map_err(|e| format!("Failed to run git show file: {}", e))?; - if !output.status.success() { return Err(String::from_utf8_lossy(&output.stderr).to_string()); } + .output() + .await + .map_err(|e| format!("Failed to run git show file: {}", e))?; + if !output.status.success() { + return Err(String::from_utf8_lossy(&output.stderr).to_string()); + } Ok(String::from_utf8_lossy(&output.stdout).to_string()) } #[tauri::command] -pub async fn get_file_at_commit(project_path: String, commit_hash: String, file_path: String) -> Result { +pub async 
fn get_file_at_commit( + project_path: String, + commit_hash: String, + file_path: String, +) -> Result { let spec = format!("{}:{}", commit_hash, file_path); let output = tokio::process::Command::new("git") - .arg("-C").arg(&project_path) + .arg("-C") + .arg(&project_path) .args(["show", &spec]) - .output().await.map_err(|e| format!("Failed to git show {}: {}", spec, e))?; - if !output.status.success() { return Err(String::from_utf8_lossy(&output.stderr).to_string()); } + .output() + .await + .map_err(|e| format!("Failed to git show {}: {}", spec, e))?; + if !output.status.success() { + return Err(String::from_utf8_lossy(&output.stderr).to_string()); + } Ok(String::from_utf8_lossy(&output.stdout).to_string()) } - // ---------------- Project Chat History ---------------- -use serde::{Serialize, Deserialize}; +use serde::{Deserialize, Serialize}; #[derive(Debug, Clone, Serialize, Deserialize)] pub struct ChatMessage { @@ -554,29 +707,48 @@ pub struct ChatMessage { pub agent: Option, } -fn chat_store_key(project_path: &str) -> String { format!("chat::{}", project_path) } +fn chat_store_key(project_path: &str) -> String { + format!("chat::{}", project_path) +} #[tauri::command] -pub async fn load_project_chat(app: tauri::AppHandle, project_path: String) -> Result, String> { +pub async fn load_project_chat( + app: tauri::AppHandle, + project_path: String, +) -> Result, String> { use tauri_plugin_store::StoreExt; let store = app.store("chat-history.json").map_err(|e| e.to_string())?; let key = chat_store_key(&project_path); - let val = store.get(&key).map(|v| v.clone()).unwrap_or(serde_json::Value::Null); + let val = store + .get(&key) + .map(|v| v.clone()) + .unwrap_or(serde_json::Value::Null); let msgs: Vec = serde_json::from_value(val).unwrap_or_default(); Ok(msgs) } #[tauri::command] -pub async fn save_project_chat(app: tauri::AppHandle, project_path: String, messages: Vec) -> Result<(), String> { +pub async fn save_project_chat( + app: tauri::AppHandle, + 
project_path: String, + messages: Vec, +) -> Result<(), String> { use tauri_plugin_store::StoreExt; let store = app.store("chat-history.json").map_err(|e| e.to_string())?; let key = chat_store_key(&project_path); - store.set(&key, serde_json::to_value(messages).map_err(|e| e.to_string())?); + store.set( + &key, + serde_json::to_value(messages).map_err(|e| e.to_string())?, + ); store.save().map_err(|e| e.to_string()) } #[tauri::command] -pub async fn append_project_chat_message(app: tauri::AppHandle, project_path: String, message: ChatMessage) -> Result<(), String> { +pub async fn append_project_chat_message( + app: tauri::AppHandle, + project_path: String, + message: ChatMessage, +) -> Result<(), String> { let mut existing = load_project_chat(app.clone(), project_path.clone()).await?; existing.push(message); save_project_chat(app, project_path, existing).await @@ -604,9 +776,12 @@ pub fn set_cli_project_path(path: String) { } #[tauri::command] -pub async fn open_project_from_path(app: tauri::AppHandle, current_path: String) -> Result { +pub async fn open_project_from_path( + app: tauri::AppHandle, + current_path: String, +) -> Result { use std::env; - + // Get the absolute path let path = Path::new(¤t_path); let absolute_path = if path.is_absolute() { @@ -616,21 +791,24 @@ pub async fn open_project_from_path(app: tauri::AppHandle, current_path: String) .map_err(|e| format!("Failed to get current directory: {}", e))? 
.join(path) }; - + let path_str = absolute_path.to_string_lossy().to_string(); - + // Try to resolve git project path (handles worktrees, submodules, regular repos) if let Some(git_root) = git_service::resolve_git_project_path(&path_str) { println!("🔍 Git root found: {}", git_root); - + // Found git repository, emit event to frontend to load this project println!("📡 Emitting open-project event with path: {}", git_root); app.emit("open-project", git_root.clone()) .map_err(|e| format!("Failed to emit open-project event: {}", e))?; - + println!("✅ open-project event emitted successfully"); Ok(git_root) } else { - Err(format!("Directory '{}' is not a git repository or contains no git project", current_path)) + Err(format!( + "Directory '{}' is not a git repository or contains no git project", + current_path + )) } } diff --git a/src-tauri/src/commands/llm_commands.rs b/src-tauri/src/commands/llm_commands.rs index bc270ef..7c3d2f2 100644 --- a/src-tauri/src/commands/llm_commands.rs +++ b/src-tauri/src/commands/llm_commands.rs @@ -1,10 +1,12 @@ use std::collections::HashMap; + use tauri::Emitter; use tokio::process::Command; +use crate::commands::settings_commands::load_agent_settings; use crate::models::*; +use crate::services::agent_status_service::AgentStatusService; use crate::services::llm_service; -use crate::commands::settings_commands::load_agent_settings; // Check if a command is available in the system async fn check_command_available(command: &str) -> bool { @@ -13,7 +15,7 @@ async fn check_command_available(command: &str) -> bool { } else { Command::new("which").arg(command).output().await }; - + match check_cmd { Ok(output) => output.status.success(), Err(_) => false, @@ -25,7 +27,7 @@ pub async fn fetch_openrouter_models(api_key: String) -> Result, S if api_key.trim().is_empty() { return Err("OpenRouter API key is required to fetch models".to_string()); } - + llm_service::fetch_openrouter_models(&api_key).await } @@ -34,7 +36,7 @@ pub async fn 
fetch_openai_models(api_key: String) -> Result, Strin if api_key.trim().is_empty() { return Err("OpenAI API key is required to fetch models".to_string()); } - + llm_service::fetch_openai_models(&api_key).await } @@ -44,7 +46,7 @@ pub async fn check_ollama_installation() -> Result { .arg("--version") .output() .await; - + match output { Ok(output) => Ok(output.status.success()), Err(_) => Ok(false), @@ -60,21 +62,23 @@ pub async fn fetch_ollama_models() -> Result, String> { .map_err(|e| format!("Failed to execute ollama list: {}", e))?; if !output.status.success() { - return Err("Failed to list Ollama models. Make sure Ollama is installed and running.".to_string()); + return Err( + "Failed to list Ollama models. Make sure Ollama is installed and running.".to_string(), + ); } let stdout = String::from_utf8(output.stdout) .map_err(|e| format!("Failed to parse ollama output: {}", e))?; let mut models = Vec::new(); - + // Parse ollama list output // Skip the header line and process each model line for line in stdout.lines().skip(1) { if line.trim().is_empty() { continue; } - + let parts: Vec<&str> = line.split_whitespace().collect(); if parts.len() >= 1 { let model_name = parts[0].to_string(); @@ -145,14 +149,27 @@ pub async fn fetch_claude_models() -> Result, String> { // Look for lines containing model names or --model parameter info for line in stdout.lines() { let line = line.trim().to_lowercase(); - if line.contains("model") && (line.contains("claude") || line.contains("sonnet") || line.contains("opus") || line.contains("haiku")) { + if line.contains("model") + && (line.contains("claude") + || line.contains("sonnet") + || line.contains("opus") + || line.contains("haiku")) + { // Extract model names if they appear to be model identifiers if line.contains("claude-3") || line.contains("claude-3.5") { // Common Claude model patterns - if line.contains("opus") { models.push("claude-3-opus".to_string()); } - if line.contains("sonnet") { 
models.push("claude-3-sonnet".to_string()); } - if line.contains("haiku") { models.push("claude-3-haiku".to_string()); } - if line.contains("3.5") && line.contains("sonnet") { models.push("claude-3-5-sonnet".to_string()); } + if line.contains("opus") { + models.push("claude-3-opus".to_string()); + } + if line.contains("sonnet") { + models.push("claude-3-sonnet".to_string()); + } + if line.contains("haiku") { + models.push("claude-3-haiku".to_string()); + } + if line.contains("3.5") && line.contains("sonnet") { + models.push("claude-3-5-sonnet".to_string()); + } } } } @@ -197,9 +214,15 @@ pub async fn fetch_codex_models() -> Result, String> { for line in stdout.lines() { let line = line.trim().to_lowercase(); if line.contains("model") && (line.contains("gpt") || line.contains("codex")) { - if line.contains("gpt-4") { models.push("gpt-4".to_string()); } - if line.contains("gpt-3.5") { models.push("gpt-3.5-turbo".to_string()); } - if line.contains("codex") { models.push("code-davinci-002".to_string()); } + if line.contains("gpt-4") { + models.push("gpt-4".to_string()); + } + if line.contains("gpt-3.5") { + models.push("gpt-3.5-turbo".to_string()); + } + if line.contains("codex") { + models.push("code-davinci-002".to_string()); + } } } } @@ -218,8 +241,12 @@ pub async fn fetch_codex_models() -> Result, String> { for line in stdout.lines() { let line = line.trim().to_lowercase(); if line.contains("model") && line.contains("gpt") { - if line.contains("gpt-4") { models.push("gpt-4".to_string()); } - if line.contains("gpt-3.5") { models.push("gpt-3.5-turbo".to_string()); } + if line.contains("gpt-4") { + models.push("gpt-4".to_string()); + } + if line.contains("gpt-3.5") { + models.push("gpt-3.5-turbo".to_string()); + } } } } @@ -267,10 +294,18 @@ pub async fn fetch_gemini_models() -> Result, String> { for line in stdout.lines() { let line = line.trim().to_lowercase(); if line.contains("model") && line.contains("gemini") { - if line.contains("gemini-pro") { 
models.push("gemini-pro".to_string()); } - if line.contains("gemini-1.5") { models.push("gemini-1.5-pro".to_string()); } - if line.contains("gemini-ultra") { models.push("gemini-ultra".to_string()); } - if line.contains("gemini-flash") { models.push("gemini-1.5-flash".to_string()); } + if line.contains("gemini-pro") { + models.push("gemini-pro".to_string()); + } + if line.contains("gemini-1.5") { + models.push("gemini-1.5-pro".to_string()); + } + if line.contains("gemini-ultra") { + models.push("gemini-ultra".to_string()); + } + if line.contains("gemini-flash") { + models.push("gemini-1.5-flash".to_string()); + } } } @@ -302,97 +337,17 @@ pub async fn fetch_agent_models(agent: String) -> Result, String> { #[tauri::command] pub async fn check_ai_agents(app: tauri::AppHandle) -> Result { - let agents = vec![ - ("claude", "Claude Code CLI"), - ("codex", "Codex"), - ("gemini", "Gemini"), - ]; - - // Load agent settings to see which ones are enabled let enabled_agents = load_agent_settings(app).await.unwrap_or_else(|_| { - let mut default = HashMap::new(); - default.insert("claude".to_string(), true); - default.insert("codex".to_string(), true); - default.insert("gemini".to_string(), true); - default + HashMap::from([ + ("claude".to_string(), true), + ("codex".to_string(), true), + ("gemini".to_string(), true), + ]) }); - let mut checked_agents = Vec::new(); - - for (command, display_name) in agents { - let enabled = enabled_agents.get(command).unwrap_or(&true) == &true; - let mut error_message: Option = None; - - // Check availability for all agents (enabled and disabled) - let available = if enabled { - // For enabled agents, check if command exists and try to get more detailed status - let check_result = tokio::process::Command::new("which") - .arg(command) - .output() - .await; - - match check_result { - Ok(output) => { - if output.status.success() { - // Command exists, now try to run it to get more detailed error info - let version_check = 
tokio::process::Command::new(command) - .arg("--version") - .output() - .await; - - match version_check { - Ok(version_output) => { - if !version_output.status.success() { - let stderr = String::from_utf8_lossy(&version_output.stderr); - // Check for common error patterns like "limit" or "credits" - if stderr.to_lowercase().contains("limit") || - stderr.to_lowercase().contains("credit") || - stderr.to_lowercase().contains("quota") { - error_message = Some(format!("Rate limit or quota reached: {}", stderr.trim())); - false - } else if !stderr.trim().is_empty() { - error_message = Some(stderr.trim().to_string()); - false - } else { - true - } - } else { - true - } - } - Err(e) => { - error_message = Some(format!("Failed to execute {}: {}", command, e)); - false - } - } - } else { - error_message = Some(format!("{} command not found in PATH", command)); - false - } - } - Err(e) => { - error_message = Some(format!("Failed to check {}: {}", command, e)); - false - } - } - } else { - // For disabled agents, don't check availability but mark as unavailable - false - }; - - checked_agents.push(AIAgent { - name: command.to_string(), - command: command.to_string(), - display_name: display_name.to_string(), - available, - enabled, - error_message, - }); - } - - Ok(AgentStatus { - agents: checked_agents, - }) + AgentStatusService::new() + .check_agents(&enabled_agents) + .await } #[tauri::command] @@ -405,7 +360,10 @@ pub async fn generate_plan(prompt: String, system_prompt: String) -> Result'".to_string()); + return Err( + "No Ollama models available. 
Please pull a model first with 'ollama pull '" + .to_string(), + ); } // Use the first available model (you could make this configurable) @@ -433,7 +391,7 @@ pub async fn generate_plan(prompt: String, system_prompt: String) -> Result Result<(), String> { tokio::time::sleep(tokio::time::Duration::from_secs(10)).await; } }); - + Ok(()) -} \ No newline at end of file +} diff --git a/src-tauri/src/commands/menu_commands.rs b/src-tauri/src/commands/menu_commands.rs index b89bf3f..88c093e 100644 --- a/src-tauri/src/commands/menu_commands.rs +++ b/src-tauri/src/commands/menu_commands.rs @@ -6,27 +6,29 @@ use crate::commands::project_commands::open_existing_project as cmd_open_existin #[tauri::command] pub async fn menu_new_project(app: tauri::AppHandle) -> Result<(), String> { // Emit event to frontend to show new project dialog - app.emit("menu://new-project", ()).map_err(|e| e.to_string())?; + app.emit("menu://new-project", ()) + .map_err(|e| e.to_string())?; Ok(()) } -#[tauri::command] +#[tauri::command] pub async fn menu_clone_project(app: tauri::AppHandle) -> Result<(), String> { // Emit event to frontend to show clone project dialog - app.emit("menu://clone-project", ()).map_err(|e| e.to_string())?; + app.emit("menu://clone-project", ()) + .map_err(|e| e.to_string())?; Ok(()) } #[tauri::command] pub async fn menu_open_project(app: tauri::AppHandle) -> Result<(), String> { // Use native file picker to select project directory - use tauri_plugin_dialog::DialogExt; - use std::sync::{Arc, Mutex}; use std::sync::mpsc; - + use std::sync::{Arc, Mutex}; + use tauri_plugin_dialog::DialogExt; + let (tx, rx) = mpsc::channel(); let tx = Arc::new(Mutex::new(Some(tx))); - + app.dialog() .file() .set_title("Open Project Folder") @@ -38,7 +40,7 @@ pub async fn menu_open_project(app: tauri::AppHandle) -> Result<(), String> { } } }); - + match rx.recv() { Ok(Some(path)) => { let path_str = match path { @@ -50,32 +52,35 @@ pub async fn menu_open_project(app: tauri::AppHandle) -> 
Result<(), String> { match cmd_open_existing_project(app.clone(), path_str.clone()).await { Ok(_recent) => { // Emit event to frontend with selected project path - app.emit("menu://open-project", path_str).map_err(|e| e.to_string())?; - }, + app.emit("menu://open-project", path_str) + .map_err(|e| e.to_string())?; + } Err(e) => return Err(e), } - }, + } Ok(None) => { // User cancelled - }, + } Err(_) => { return Err("Failed to receive folder selection result".to_string()); - }, + } } - + Ok(()) } #[tauri::command] pub async fn menu_close_project(app: tauri::AppHandle) -> Result<(), String> { // Emit event to frontend to close current project - app.emit("menu://close-project", ()).map_err(|e| e.to_string())?; + app.emit("menu://close-project", ()) + .map_err(|e| e.to_string())?; Ok(()) } #[tauri::command] pub async fn menu_delete_project(app: tauri::AppHandle) -> Result<(), String> { // Emit event to frontend to show delete project confirmation - app.emit("menu://delete-project", ()).map_err(|e| e.to_string())?; + app.emit("menu://delete-project", ()) + .map_err(|e| e.to_string())?; Ok(()) } diff --git a/src-tauri/src/commands/mod.rs b/src-tauri/src/commands/mod.rs index e4edbd2..bd7bcd9 100644 --- a/src-tauri/src/commands/mod.rs +++ b/src-tauri/src/commands/mod.rs @@ -1,27 +1,27 @@ // Command modules +pub mod chat_history_commands; +pub mod chat_migration_commands; pub mod cli_commands; +pub mod file_commands; pub mod git_commands; -pub mod project_commands; pub mod llm_commands; -pub mod session_commands; -pub mod file_commands; -pub mod settings_commands; pub mod menu_commands; +pub mod project_commands; pub mod prompt_commands; +pub mod session_commands; +pub mod settings_commands; pub mod sub_agent_commands; -pub mod chat_history_commands; -pub mod chat_migration_commands; // Re-export all command functions for easy access +pub use chat_history_commands::*; +pub use chat_migration_commands::*; pub use cli_commands::*; +pub use file_commands::*; pub use 
git_commands::*; -pub use project_commands::*; pub use llm_commands::*; -pub use session_commands::*; -pub use file_commands::*; -pub use settings_commands::*; pub use menu_commands::*; +pub use project_commands::*; pub use prompt_commands::*; +pub use session_commands::*; +pub use settings_commands::*; pub use sub_agent_commands::*; -pub use chat_history_commands::*; -pub use chat_migration_commands::*; \ No newline at end of file diff --git a/src-tauri/src/commands/project_commands.rs b/src-tauri/src/commands/project_commands.rs index 41d4692..202a081 100644 --- a/src-tauri/src/commands/project_commands.rs +++ b/src-tauri/src/commands/project_commands.rs @@ -1,25 +1,25 @@ -use tauri_plugin_store::StoreExt; -use std::path::Path; use std::fs; +use std::path::Path; +use tauri_plugin_store::StoreExt; use crate::models::*; use crate::services::project_service; async fn scan_projects_folder(projects_folder: &str) -> Result, String> { let path = Path::new(projects_folder); - + if !path.exists() { return Ok(Vec::new()); } let mut projects = Vec::new(); - + match fs::read_dir(path) { Ok(entries) => { for entry in entries { if let Ok(entry) = entry { let entry_path = entry.path(); - + // Only consider directories if entry_path.is_dir() { if let Some(name) = entry_path.file_name() { @@ -28,16 +28,16 @@ async fn scan_projects_folder(projects_folder: &str) -> Result Result Result Result Result { Some(mut path) => { path.push("Projects"); Ok(path.to_string_lossy().to_string()) - }, + } None => Err("Could not determine user home directory".to_string()), } } @@ -133,26 +139,28 @@ pub async fn ensure_directory_exists(path: String) -> Result<(), String> { #[tauri::command] pub async fn save_projects_folder(app: tauri::AppHandle, path: String) -> Result<(), String> { - let store = app.store("app-settings.json") + let store = app + .store("app-settings.json") .map_err(|e| format!("Failed to access store: {}", e))?; - + store.set("projects_folder", 
serde_json::Value::String(path.clone())); - - store.save() + + store + .save() .map_err(|e| format!("Failed to persist projects folder: {}", e))?; - + Ok(()) } #[tauri::command] pub async fn select_projects_folder(app: tauri::AppHandle) -> Result, String> { - use tauri_plugin_dialog::DialogExt; - use std::sync::{Arc, Mutex}; use std::sync::mpsc; - + use std::sync::{Arc, Mutex}; + use tauri_plugin_dialog::DialogExt; + let (tx, rx) = mpsc::channel(); let tx = Arc::new(Mutex::new(Some(tx))); - + app.dialog() .file() .set_title("Select Default Projects Folder") @@ -164,7 +172,7 @@ pub async fn select_projects_folder(app: tauri::AppHandle) -> Result Ok(result), Err(_) => Err("Failed to receive folder selection result".to_string()), @@ -173,9 +181,10 @@ pub async fn select_projects_folder(app: tauri::AppHandle) -> Result Result, String> { - let store = app.store("app-settings.json") + let store = app + .store("app-settings.json") .map_err(|e| format!("Failed to access store: {}", e))?; - + match store.get("projects_folder") { Some(serde_json::Value::String(path)) => Ok(Some(path)), _ => Ok(None), @@ -185,16 +194,17 @@ pub async fn load_projects_folder(app: tauri::AppHandle) -> Result Result, String> { // Load from persistent storage instead of just scanning current folder - let store = app.store("recent-projects.json") + let store = app + .store("recent-projects.json") .map_err(|e| format!("Failed to access recent projects store: {}", e))?; - + match store.get("projects") { Some(value) => { - let projects: Vec = serde_json::from_value(value) - .unwrap_or_else(|_| Vec::new()); - + let projects: Vec = + serde_json::from_value(value).unwrap_or_else(|_| Vec::new()); + let original_count = projects.len(); - + // Filter out projects that no longer exist and update git info let mut valid_projects = Vec::new(); for project in projects { @@ -203,26 +213,30 @@ pub async fn list_recent_projects(app: tauri::AppHandle) -> Result Result Result { // If no recent projects exist, scan 
current projects folder as fallback let projects_folder = match load_projects_folder(app.clone()).await? { Some(folder) => folder, None => get_default_projects_folder().await?, }; - + scan_projects_folder(&projects_folder).await } } @@ -287,14 +304,17 @@ pub async fn refresh_recent_projects(app: tauri::AppHandle) -> Result Result<(), String> { println!("🧹 Clearing recent projects storage for development..."); - - let store = app.store("recent-projects.json") + + let store = app + .store("recent-projects.json") .map_err(|e| format!("Failed to access recent projects store: {}", e))?; - + // Clear the projects array store.set("projects", serde_json::Value::Array(vec![])); - store.save().map_err(|e| format!("Failed to save cleared recent projects: {}", e))?; - + store + .save() + .map_err(|e| format!("Failed to save cleared recent projects: {}", e))?; + println!("✅ Recent projects storage cleared successfully!"); Ok(()) } @@ -308,30 +328,36 @@ pub async fn open_existing_project( } #[tauri::command] -pub async fn check_project_name_conflict(projects_folder: String, project_name: String) -> Result { - Ok(project_service::check_project_name_conflict(&projects_folder, &project_name)) +pub async fn check_project_name_conflict( + projects_folder: String, + project_name: String, +) -> Result { + Ok(project_service::check_project_name_conflict( + &projects_folder, + &project_name, + )) } #[tauri::command] pub async fn create_new_project_with_git( app: tauri::AppHandle, - projects_folder: String, - project_name: String + projects_folder: String, + project_name: String, ) -> Result { use std::process::Stdio; - + let project_path = std::path::Path::new(&projects_folder).join(&project_name); let project_path_str = project_path.to_string_lossy().to_string(); - + // Check if project already exists if project_path.exists() { return Err(format!("A project named '{}' already exists", project_name)); } - + // Create the directory std::fs::create_dir_all(&project_path) .map_err(|e| 
format!("Failed to create project directory: {}", e))?; - + // Initialize git repository let git_init = tokio::process::Command::new("git") .args(&["init"]) @@ -342,18 +368,21 @@ pub async fn create_new_project_with_git( .output() .await .map_err(|e| format!("Failed to initialize git repository: {}", e))?; - + if !git_init.status.success() { let stderr = String::from_utf8_lossy(&git_init.stderr); return Err(format!("Git init failed: {}", stderr)); } - + // Create README.md file - let readme_content = format!("# {}\n\nA new project created with Commander.\n", project_name); + let readme_content = format!( + "# {}\n\nA new project created with Commander.\n", + project_name + ); let readme_path = project_path.join("README.md"); std::fs::write(&readme_path, readme_content) .map_err(|e| format!("Failed to create README.md: {}", e))?; - + // Stage and commit the README let git_add = tokio::process::Command::new("git") .args(&["add", "README.md"]) @@ -362,12 +391,12 @@ pub async fn create_new_project_with_git( .output() .await .map_err(|e| format!("Failed to stage README: {}", e))?; - + if !git_add.status.success() { let stderr = String::from_utf8_lossy(&git_add.stderr); return Err(format!("Git add failed: {}", stderr)); } - + let git_commit = tokio::process::Command::new("git") .args(&["commit", "-m", "Initial commit with README"]) .current_dir(&project_path) @@ -375,19 +404,25 @@ pub async fn create_new_project_with_git( .output() .await .map_err(|e| format!("Failed to commit README: {}", e))?; - + if !git_commit.status.success() { let stderr = String::from_utf8_lossy(&git_commit.stderr); return Err(format!("Git commit failed: {}", stderr)); } - + // Add the newly created project to recent projects // TODO: Be able to handle this better, I think the history of projects is always flagging the new project correctly but unflagging the previous one I was working. 
if let Err(e) = add_project_to_recent(app, project_path_str.clone()).await { - eprintln!("⚠️ Warning: Failed to add project to recent projects: {}", e); + eprintln!( + "⚠️ Warning: Failed to add project to recent projects: {}", + e + ); // Don't fail the whole operation, just log the warning } - - println!("✅ Project '{}' created successfully and added to recent projects", project_name); + + println!( + "✅ Project '{}' created successfully and added to recent projects", + project_name + ); Ok(project_path_str) } diff --git a/src-tauri/src/commands/prompt_commands.rs b/src-tauri/src/commands/prompt_commands.rs index 774e9b4..6b5a800 100644 --- a/src-tauri/src/commands/prompt_commands.rs +++ b/src-tauri/src/commands/prompt_commands.rs @@ -1,5 +1,5 @@ -use crate::services::prompt_service; use crate::models::*; +use crate::services::prompt_service; #[tauri::command] pub async fn load_prompts(app: tauri::AppHandle) -> Result { @@ -17,16 +17,29 @@ pub async fn get_default_prompts() -> Result { } #[tauri::command] -pub async fn update_prompt(app: tauri::AppHandle, category: String, key: String, prompt: PromptTemplate) -> Result<(), String> { +pub async fn update_prompt( + app: tauri::AppHandle, + category: String, + key: String, + prompt: PromptTemplate, +) -> Result<(), String> { prompt_service::update_prompt(&app, &category, &key, &prompt).await } #[tauri::command] -pub async fn delete_prompt(app: tauri::AppHandle, category: String, key: String) -> Result<(), String> { +pub async fn delete_prompt( + app: tauri::AppHandle, + category: String, + key: String, +) -> Result<(), String> { prompt_service::delete_prompt(&app, &category, &key).await } #[tauri::command] -pub async fn create_prompt_category(app: tauri::AppHandle, category: String, description: String) -> Result<(), String> { +pub async fn create_prompt_category( + app: tauri::AppHandle, + category: String, + description: String, +) -> Result<(), String> { prompt_service::create_category(&app, &category, 
&description).await -} \ No newline at end of file +} diff --git a/src-tauri/src/commands/session_commands.rs b/src-tauri/src/commands/session_commands.rs index 22b9fe7..f2a75d4 100644 --- a/src-tauri/src/commands/session_commands.rs +++ b/src-tauri/src/commands/session_commands.rs @@ -1,5 +1,8 @@ +use crate::commands::cli_commands::{ + cleanup_cli_sessions, get_sessions_status, send_quit_to_session, terminate_all_active_sessions, + terminate_session_by_id, +}; use crate::models::*; -use crate::commands::cli_commands::{cleanup_cli_sessions, get_sessions_status, terminate_session_by_id, terminate_all_active_sessions, send_quit_to_session}; #[tauri::command] pub async fn get_active_sessions() -> Result { @@ -24,4 +27,4 @@ pub async fn send_quit_command_to_session(session_id: String) -> Result<(), Stri #[tauri::command] pub async fn cleanup_sessions() -> Result<(), String> { cleanup_cli_sessions().await -} \ No newline at end of file +} diff --git a/src-tauri/src/commands/settings_commands.rs b/src-tauri/src/commands/settings_commands.rs index 0531ea9..3f0cbc2 100644 --- a/src-tauri/src/commands/settings_commands.rs +++ b/src-tauri/src/commands/settings_commands.rs @@ -7,17 +7,19 @@ use crate::models::*; #[tauri::command] pub async fn save_app_settings(app: tauri::AppHandle, settings: AppSettings) -> Result<(), String> { - let store = app.store("app-settings.json") + let store = app + .store("app-settings.json") .map_err(|e| format!("Failed to access store: {}", e))?; - + let serialized_settings = serde_json::to_value(&settings) .map_err(|e| format!("Failed to serialize settings: {}", e))?; - + store.set("app_settings", serialized_settings); - - store.save() + + store + .save() .map_err(|e| format!("Failed to persist settings: {}", e))?; - + // Also persist user-facing option into ~/.commander/settings.json let _ = set_show_recent_projects_welcome_screen(settings.show_welcome_recent_projects); @@ -40,9 +42,10 @@ pub async fn set_window_theme(window: tauri::Window, 
theme: String) -> Result<() #[tauri::command] pub async fn load_app_settings(app: tauri::AppHandle) -> Result { - let store = app.store("app-settings.json") + let store = app + .store("app-settings.json") .map_err(|e| format!("Failed to access store: {}", e))?; - + match store.get("app_settings") { Some(value) => { let settings: AppSettings = serde_json::from_value(value) @@ -52,14 +55,14 @@ pub async fn load_app_settings(app: tauri::AppHandle) -> Result { // Return default settings let mut d = AppSettings::default(); let show = get_show_recent_projects_welcome_screen().unwrap_or(true); d.show_welcome_recent_projects = show; Ok(d) - }, + } } } @@ -74,48 +77,59 @@ pub async fn set_show_recent_projects_setting(enabled: bool) -> Result<(), Strin } #[tauri::command] -pub async fn save_agent_settings(app: tauri::AppHandle, settings: HashMap) -> Result<(), String> { - let store = app.store("agent-settings.json") +pub async fn save_agent_settings( + app: tauri::AppHandle, + settings: HashMap, +) -> Result<(), String> { + let store = app + .store("agent-settings.json") .map_err(|e| format!("Failed to access store: {}", e))?; - + let serialized_settings = serde_json::to_value(&settings) .map_err(|e| format!("Failed to serialize settings: {}", e))?; - + store.set("agent_settings", serialized_settings); - - store.save() + + store + .save() .map_err(|e| format!("Failed to persist settings: {}", e))?; - + Ok(()) } #[tauri::command] -pub async fn save_all_agent_settings(app: tauri::AppHandle, settings: AllAgentSettings) -> Result<(), String> { - let store = app.store("all-agent-settings.json") +pub async fn save_all_agent_settings( + app: tauri::AppHandle, + settings: AllAgentSettings, +) -> Result<(), String> { + let store = app + .store("all-agent-settings.json") .map_err(|e| format!("Failed to access store: {}", e))?; - + let serialized_settings = serde_json::to_value(&settings) .map_err(|e| format!("Failed to serialize settings: {}", e))?; - + store.set("all_agent_settings", 
serialized_settings); - - store.save() + + store + .save() .map_err(|e| format!("Failed to persist settings: {}", e))?; - + Ok(()) } #[tauri::command] pub async fn load_all_agent_settings(app: tauri::AppHandle) -> Result { - let store = app.store("all-agent-settings.json") + let store = app + .store("all-agent-settings.json") .map_err(|e| format!("Failed to access store: {}", e))?; - + match store.get("all_agent_settings") { Some(value) => { let settings: AllAgentSettings = serde_json::from_value(value) .map_err(|e| format!("Failed to deserialize settings: {}", e))?; Ok(settings) - }, + } None => { // Return default settings Ok(AllAgentSettings { @@ -124,16 +138,18 @@ pub async fn load_all_agent_settings(app: tauri::AppHandle) -> Result Result { - let home = dirs::home_dir().ok_or_else(|| "Could not determine user home directory".to_string())?; + let home = + dirs::home_dir().ok_or_else(|| "Could not determine user home directory".to_string())?; let dir = home.join(".commander"); if !dir.exists() { - fs::create_dir_all(&dir).map_err(|e| format!("Failed to create settings directory: {}", e))?; + fs::create_dir_all(&dir) + .map_err(|e| format!("Failed to create settings directory: {}", e))?; } Ok(dir.join("settings.json")) } @@ -147,7 +163,8 @@ fn load_user_settings_json() -> Result { } })); } - let content = fs::read_to_string(&path).map_err(|e| format!("Failed to read settings.json: {}", e))?; + let content = + fs::read_to_string(&path).map_err(|e| format!("Failed to read settings.json: {}", e))?; let v: serde_json::Value = serde_json::from_str(&content).unwrap_or(serde_json::json!({})); Ok(v) } @@ -155,8 +172,11 @@ fn load_user_settings_json() -> Result { fn save_user_settings_json(mut root: serde_json::Value) -> Result<(), String> { let path = user_settings_path()?; // Ensure object root - if !root.is_object() { root = serde_json::json!({}); } - let content = serde_json::to_string_pretty(&root).map_err(|e| format!("Failed to serialize settings.json: {}", e))?; + 
if !root.is_object() { + root = serde_json::json!({}); + } + let content = serde_json::to_string_pretty(&root) + .map_err(|e| format!("Failed to serialize settings.json: {}", e))?; fs::write(&path, content).map_err(|e| format!("Failed to write settings.json: {}", e))?; Ok(()) } @@ -181,15 +201,16 @@ fn set_show_recent_projects_welcome_screen(enabled: bool) -> Result<(), String> #[tauri::command] pub async fn load_agent_settings(app: tauri::AppHandle) -> Result, String> { - let store = app.store("agent-settings.json") + let store = app + .store("agent-settings.json") .map_err(|e| format!("Failed to access store: {}", e))?; - + match store.get("agent_settings") { Some(value) => { let settings: HashMap = serde_json::from_value(value) .map_err(|e| format!("Failed to deserialize settings: {}", e))?; Ok(settings) - }, + } None => { // Return default settings (all agents enabled) let mut default = HashMap::new(); @@ -197,6 +218,6 @@ pub async fn load_agent_settings(app: tauri::AppHandle) -> Result, operation: String, message: String, }, - + /// External command execution errors - Command { + Command { command: String, exit_code: Option, message: String, }, - + /// Network/API errors - Network { + Network { url: String, status_code: Option, message: String, }, - + /// Serialization/deserialization errors - Serialization { - data_type: String, - message: String, - }, - + Serialization { data_type: String, message: String }, + /// Permission/access errors - Permission { - resource: String, - message: String, - }, - + Permission { resource: String, message: String }, + /// Validation errors - Validation { + Validation { field: String, value: String, message: String, }, - + /// Generic application errors - Application { - component: String, - message: String, - }, + Application { component: String, message: String }, } impl CommanderError { /// Create a Git error - pub fn git(operation: impl Into, path: impl Into, message: impl Into) -> Self { + pub fn git( + operation: impl 
Into, + path: impl Into, + message: impl Into, + ) -> Self { Self::Git { operation: operation.into(), path: path.into(), message: message.into(), } } - + /// Create a Project error - pub fn project(operation: impl Into, project_name: impl Into, message: impl Into) -> Self { + pub fn project( + operation: impl Into, + project_name: impl Into, + message: impl Into, + ) -> Self { Self::Project { operation: operation.into(), project_name: project_name.into(), message: message.into(), } } - + /// Create a File System error - pub fn file_system(operation: impl Into, path: impl Into, message: impl Into) -> Self { + pub fn file_system( + operation: impl Into, + path: impl Into, + message: impl Into, + ) -> Self { Self::FileSystem { operation: operation.into(), path: path.into(), message: message.into(), } } - + /// Create an LLM error - pub fn llm(provider: impl Into, operation: impl Into, message: impl Into) -> Self { + pub fn llm( + provider: impl Into, + operation: impl Into, + message: impl Into, + ) -> Self { Self::LLM { provider: provider.into(), operation: operation.into(), message: message.into(), } } - + /// Create a Configuration error pub fn configuration(component: impl Into, message: impl Into) -> Self { Self::Configuration { @@ -130,34 +134,46 @@ impl CommanderError { message: message.into(), } } - + /// Create a Session error - pub fn session(session_id: Option, operation: impl Into, message: impl Into) -> Self { + pub fn session( + session_id: Option, + operation: impl Into, + message: impl Into, + ) -> Self { Self::Session { session_id, operation: operation.into(), message: message.into(), } } - + /// Create a Command execution error - pub fn command(command: impl Into, exit_code: Option, message: impl Into) -> Self { + pub fn command( + command: impl Into, + exit_code: Option, + message: impl Into, + ) -> Self { Self::Command { command: command.into(), exit_code, message: message.into(), } } - + /// Create a Network error - pub fn network(url: impl Into, 
status_code: Option, message: impl Into) -> Self { + pub fn network( + url: impl Into, + status_code: Option, + message: impl Into, + ) -> Self { Self::Network { url: url.into(), status_code, message: message.into(), } } - + /// Create a Serialization error pub fn serialization(data_type: impl Into, message: impl Into) -> Self { Self::Serialization { @@ -165,7 +181,7 @@ impl CommanderError { message: message.into(), } } - + /// Create a Permission error pub fn permission(resource: impl Into, message: impl Into) -> Self { Self::Permission { @@ -173,16 +189,20 @@ impl CommanderError { message: message.into(), } } - + /// Create a Validation error - pub fn validation(field: impl Into, value: impl Into, message: impl Into) -> Self { + pub fn validation( + field: impl Into, + value: impl Into, + message: impl Into, + ) -> Self { Self::Validation { field: field.into(), value: value.into(), message: message.into(), } } - + /// Create a generic Application error pub fn application(component: impl Into, message: impl Into) -> Self { Self::Application { @@ -190,58 +210,105 @@ impl CommanderError { message: message.into(), } } - + /// Get user-friendly error message pub fn user_message(&self) -> String { match self { - CommanderError::Git { operation, path, message } => { - format!("Git operation '{}' failed for '{}': {}", operation, path, message) + CommanderError::Git { + operation, + path, + message, + } => { + format!( + "Git operation '{}' failed for '{}': {}", + operation, path, message + ) } - CommanderError::Project { operation, project_name, message } => { - format!("Project operation '{}' failed for '{}': {}", operation, project_name, message) + CommanderError::Project { + operation, + project_name, + message, + } => { + format!( + "Project operation '{}' failed for '{}': {}", + operation, project_name, message + ) } - CommanderError::FileSystem { operation, path, message } => { - format!("File operation '{}' failed for '{}': {}", operation, path, message) + 
CommanderError::FileSystem { + operation, + path, + message, + } => { + format!( + "File operation '{}' failed for '{}': {}", + operation, path, message + ) } - CommanderError::LLM { provider, operation, message } => { + CommanderError::LLM { + provider, + operation, + message, + } => { format!("{} operation '{}' failed: {}", provider, operation, message) } CommanderError::Configuration { component, message } => { format!("Configuration error in {}: {}", component, message) } - CommanderError::Session { session_id, operation, message } => { - match session_id { - Some(id) => format!("Session '{}' operation '{}' failed: {}", id, operation, message), - None => format!("Session operation '{}' failed: {}", operation, message), - } - } - CommanderError::Command { command, exit_code, message } => { - match exit_code { - Some(code) => format!("Command '{}' failed with exit code {}: {}", command, code, message), - None => format!("Command '{}' failed: {}", command, message), - } - } - CommanderError::Network { url, status_code, message } => { - match status_code { - Some(code) => format!("Network request to '{}' failed with status {}: {}", url, code, message), - None => format!("Network request to '{}' failed: {}", url, message), - } - } + CommanderError::Session { + session_id, + operation, + message, + } => match session_id { + Some(id) => format!( + "Session '{}' operation '{}' failed: {}", + id, operation, message + ), + None => format!("Session operation '{}' failed: {}", operation, message), + }, + CommanderError::Command { + command, + exit_code, + message, + } => match exit_code { + Some(code) => format!( + "Command '{}' failed with exit code {}: {}", + command, code, message + ), + None => format!("Command '{}' failed: {}", command, message), + }, + CommanderError::Network { + url, + status_code, + message, + } => match status_code { + Some(code) => format!( + "Network request to '{}' failed with status {}: {}", + url, code, message + ), + None => format!("Network 
request to '{}' failed: {}", url, message), + }, CommanderError::Serialization { data_type, message } => { format!("Failed to process {} data: {}", data_type, message) } CommanderError::Permission { resource, message } => { format!("Permission denied for '{}': {}", resource, message) } - CommanderError::Validation { field, value, message } => { - format!("Invalid value '{}' for field '{}': {}", value, field, message) + CommanderError::Validation { + field, + value, + message, + } => { + format!( + "Invalid value '{}' for field '{}': {}", + value, field, message + ) } CommanderError::Application { component, message } => { format!("{}: {}", component, message) } } } - + /// Get technical error message (for logging) pub fn technical_message(&self) -> String { format!("{:?}", self) @@ -294,4 +361,4 @@ macro_rules! app_error { ($component:expr, $msg:expr) => { CommanderError::application($component, $msg) }; -} \ No newline at end of file +} diff --git a/src-tauri/src/lib.rs b/src-tauri/src/lib.rs index 26bba31..35cb8ce 100644 --- a/src-tauri/src/lib.rs +++ b/src-tauri/src/lib.rs @@ -2,10 +2,10 @@ use tauri::menu::{MenuBuilder, MenuItemBuilder, SubmenuBuilder}; use tauri::Emitter; // Import all modules -mod models; -mod services; mod commands; mod error; +mod models; +mod services; use commands::*; @@ -40,57 +40,64 @@ fn create_native_menu(app: &tauri::App) -> Result, // Create the app menu (Commander) - this will be the first menu on macOS let app_submenu = SubmenuBuilder::new(app, "Commander") - .item(&MenuItemBuilder::with_id("about", "About Commander") - .build(app)?) + .item(&MenuItemBuilder::with_id("about", "About Commander").build(app)?) .separator() - .item(&MenuItemBuilder::with_id("preferences", "Preferences...") - .accelerator("CmdOrCtrl+,") - .build(app)?) + .item( + &MenuItemBuilder::with_id("preferences", "Preferences...") + .accelerator("CmdOrCtrl+,") + .build(app)?, + ) .separator() .item(&PredefinedMenuItem::quit(app, Some("Quit Commander"))?) 
.build()?; - + // Create Projects submenu as a separate menu let projects_submenu = SubmenuBuilder::new(app, "Projects") - .item(&MenuItemBuilder::with_id("new_project", "New Project") - .accelerator("CmdOrCtrl+N") - .build(app)?) + .item( + &MenuItemBuilder::with_id("new_project", "New Project") + .accelerator("CmdOrCtrl+N") + .build(app)?, + ) .separator() - .item(&MenuItemBuilder::with_id("clone_project", "Clone Project") - .accelerator("CmdOrCtrl+Shift+N") - .build(app)?) - .item(&MenuItemBuilder::with_id("open_project", "Open Project...") - .accelerator("CmdOrCtrl+O") - .build(app)?) + .item( + &MenuItemBuilder::with_id("clone_project", "Clone Project") + .accelerator("CmdOrCtrl+Shift+N") + .build(app)?, + ) + .item( + &MenuItemBuilder::with_id("open_project", "Open Project...") + .accelerator("CmdOrCtrl+O") + .build(app)?, + ) .separator() - .item(&MenuItemBuilder::with_id("close_project", "Close Project") - .accelerator("CmdOrCtrl+W") - .build(app)?) + .item( + &MenuItemBuilder::with_id("close_project", "Close Project") + .accelerator("CmdOrCtrl+W") + .build(app)?, + ) .separator() - .item(&MenuItemBuilder::with_id("delete_project", "Delete Current Project") - .build(app)?) + .item(&MenuItemBuilder::with_id("delete_project", "Delete Current Project").build(app)?) .build()?; - - + // Create Help submenu let help_submenu = SubmenuBuilder::new(app, "Help") - .item(&MenuItemBuilder::with_id("documentation", "Documentation") - .build(app)?) - .item(&MenuItemBuilder::with_id("keyboard_shortcuts_help", "Keyboard Shortcuts") - .build(app)?) + .item(&MenuItemBuilder::with_id("documentation", "Documentation").build(app)?) + .item( + &MenuItemBuilder::with_id("keyboard_shortcuts_help", "Keyboard Shortcuts") + .build(app)?, + ) .separator() - .item(&MenuItemBuilder::with_id("report_issue", "Report Issue") - .build(app)?) + .item(&MenuItemBuilder::with_id("report_issue", "Report Issue").build(app)?) 
.build()?; - + // Create main menu - order matters on macOS let menu = MenuBuilder::new(app) - .item(&app_submenu) // Commander menu (first) - .item(&projects_submenu) // Projects menu (second) - .item(&edit_submenu) // Edit menu (third) enables keyboard copy/paste - .item(&help_submenu) // Help menu (fourth) + .item(&app_submenu) // Commander menu (first) + .item(&projects_submenu) // Projects menu (second) + .item(&edit_submenu) // Edit menu (third) enables keyboard copy/paste + .item(&help_submenu) // Help menu (fourth) .build()?; - + Ok(menu) } @@ -104,7 +111,7 @@ pub fn run() { .plugin(tauri_plugin_global_shortcut::Builder::new().build()) .plugin(tauri_plugin_dialog::init()) .invoke_handler(tauri::generate_handler![ - greet, + greet, start_drag, execute_cli_command, execute_persistent_cli_command, @@ -221,7 +228,8 @@ pub fn run() { select_git_project_folder, open_project_from_path, get_cli_project_path, - clear_cli_project_path + clear_cli_project_path, + open_file_in_editor ]) .setup(|app| { // Handle command line arguments for opening projects @@ -230,25 +238,27 @@ pub fn run() { if args.len() > 1 { let path_arg = args[1].clone(); // Clone the string to avoid borrowing issues let app_handle = app.handle().clone(); - + // Spawn async task to handle project opening tauri::async_runtime::spawn(async move { // Wait longer for frontend to fully initialize and set up event listeners println!("⏳ Waiting for frontend to initialize..."); tokio::time::sleep(tokio::time::Duration::from_millis(2000)).await; - + println!("🚀 Processing CLI project path: {}", path_arg); - + // Resolve and store the project path for frontend to pick up let absolute_path = if std::path::Path::new(&path_arg).is_absolute() { std::path::PathBuf::from(&path_arg) } else { std::env::current_dir().unwrap_or_default().join(&path_arg) }; - + let path_str = absolute_path.to_string_lossy().to_string(); - - if let Some(git_root) = crate::services::git_service::resolve_git_project_path(&path_str) { + + 
if let Some(git_root) = + crate::services::git_service::resolve_git_project_path(&path_str) + { println!("✅ CLI git root found: {}", git_root); commands::git_commands::set_cli_project_path(git_root); } else { @@ -257,13 +267,13 @@ pub fn run() { }); } use tauri_plugin_global_shortcut::{GlobalShortcutExt, Shortcut, ShortcutState}; - + // Create and set the native menu println!("🍎 Creating native menu..."); let menu = create_native_menu(app)?; app.set_menu(menu.clone())?; println!("✅ Native menu created and set successfully!"); - + // Handle menu events app.on_menu_event({ let app_handle = app.handle().clone(); @@ -276,49 +286,49 @@ pub fn run() { "new_project" => { println!("📝 Creating new project via menu..."); let _ = menu_new_project(app_clone).await; - }, + } "clone_project" => { println!("🌿 Cloning project via menu..."); let _ = menu_clone_project(app_clone).await; - }, + } "open_project" => { println!("📂 Opening project via menu..."); let _ = menu_open_project(app_clone).await; - }, + } "close_project" => { println!("❌ Closing project via menu..."); let _ = menu_close_project(app_clone).await; - }, + } "delete_project" => { println!("🗑️ Deleting project via menu..."); let _ = menu_delete_project(app_clone).await; - }, + } // Settings menu items "preferences" => { println!("⚙️ Opening preferences via menu..."); app_clone.emit("menu://open-settings", ()).unwrap(); - }, + } "keyboard_shortcuts" => { println!("⌨️ Opening keyboard shortcuts via menu..."); app_clone.emit("menu://open-shortcuts", ()).unwrap(); - }, + } // Help menu items "about" => { println!("ℹ️ Opening about dialog via menu..."); app_clone.emit("menu://open-about", ()).unwrap(); - }, + } "documentation" => { println!("📚 Opening documentation via menu..."); app_clone.emit("menu://open-docs", ()).unwrap(); - }, + } "keyboard_shortcuts_help" => { println!("⌨️ Opening keyboard shortcuts help via menu..."); app_clone.emit("menu://open-shortcuts", ()).unwrap(); - }, + } "report_issue" => { println!("🐛 
Opening issue reporter via menu..."); app_clone.emit("menu://report-issue", ()).unwrap(); - }, + } _ => { println!("Unhandled menu event: {:?}", event.id()); } @@ -326,13 +336,13 @@ pub fn run() { }); } }); - + // Start monitoring AI agents on app startup let app_handle = app.handle().clone(); tauri::async_runtime::spawn(async move { let _ = monitor_ai_agents(app_handle).await; }); - + // Start session cleanup task tauri::async_runtime::spawn(async move { loop { @@ -342,31 +352,36 @@ pub fn run() { } }); - // Register Cmd+, shortcut for Settings on macOS let shortcut_manager = app.global_shortcut(); - let settings_shortcut = Shortcut::new(Some(tauri_plugin_global_shortcut::Modifiers::SUPER), tauri_plugin_global_shortcut::Code::Comma); - + let settings_shortcut = Shortcut::new( + Some(tauri_plugin_global_shortcut::Modifiers::SUPER), + tauri_plugin_global_shortcut::Code::Comma, + ); + shortcut_manager.on_shortcut(settings_shortcut, move |app, _shortcut, event| { if event.state() == ShortcutState::Pressed { // Emit an event to the frontend to open settings app.emit("shortcut://open-settings", ()).unwrap(); } })?; - - // Register Cmd+Shift+P shortcut for Chat on macOS + + // Register Cmd+Shift+P shortcut for Chat on macOS let chat_shortcut = Shortcut::new( - Some(tauri_plugin_global_shortcut::Modifiers::SUPER | tauri_plugin_global_shortcut::Modifiers::SHIFT), - tauri_plugin_global_shortcut::Code::KeyP + Some( + tauri_plugin_global_shortcut::Modifiers::SUPER + | tauri_plugin_global_shortcut::Modifiers::SHIFT, + ), + tauri_plugin_global_shortcut::Code::KeyP, ); - + shortcut_manager.on_shortcut(chat_shortcut, move |app, _shortcut, event| { if event.state() == ShortcutState::Pressed { // Emit an event to the frontend to toggle chat app.emit("shortcut://toggle-chat", ()).unwrap(); } })?; - + Ok(()) }); diff --git a/src-tauri/src/models/ai_agent.rs b/src-tauri/src/models/ai_agent.rs index 271e8c8..cad5a9c 100644 --- a/src-tauri/src/models/ai_agent.rs +++ 
b/src-tauri/src/models/ai_agent.rs @@ -8,6 +8,9 @@ pub struct AIAgent { pub available: bool, pub enabled: bool, pub error_message: Option, + pub installed_version: Option, + pub latest_version: Option, + pub upgrade_available: bool, } #[derive(Debug, Clone, Serialize, Deserialize)] @@ -66,4 +69,4 @@ pub struct StreamChunk { pub session_id: String, pub content: String, pub finished: bool, -} \ No newline at end of file +} diff --git a/src-tauri/src/models/chat_history.rs b/src-tauri/src/models/chat_history.rs index 2a71a74..dcef53d 100644 --- a/src-tauri/src/models/chat_history.rs +++ b/src-tauri/src/models/chat_history.rs @@ -1,6 +1,6 @@ +use chrono::Utc; use serde::{Deserialize, Serialize}; use std::collections::HashMap; -use chrono::Utc; /// Enhanced chat message with full metadata support #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] @@ -9,7 +9,7 @@ pub struct EnhancedChatMessage { pub role: String, // "user" | "assistant" pub content: String, pub timestamp: i64, // Unix timestamp - pub agent: String, // "claude" | "codex" | "gemini" etc. + pub agent: String, // "claude" | "codex" | "gemini" etc. 
pub metadata: ChatMessageMetadata, } @@ -54,10 +54,10 @@ pub struct LegacyChatMessage { /// Configuration for chat history management #[derive(Debug, Clone, Serialize, Deserialize)] pub struct ChatHistoryConfig { - pub session_timeout_minutes: i64, // Default: 5 minutes + pub session_timeout_minutes: i64, // Default: 5 minutes pub max_sessions_per_agent: Option, // None = unlimited - pub retention_days: Option, // None = keep forever - pub compression_threshold_kb: usize, // Default: 100KB + pub retention_days: Option, // None = keep forever + pub compression_threshold_kb: usize, // Default: 100KB pub auto_summary_enabled: bool, } @@ -117,18 +117,13 @@ pub struct ChatHistoryStats { pub total_messages: usize, pub agents_used: HashMap, // agent -> session count pub branches_used: HashMap, // branch -> session count - pub date_range: Option<(i64, i64)>, // (oldest, newest) timestamps + pub date_range: Option<(i64, i64)>, // (oldest, newest) timestamps pub disk_usage_bytes: u64, } impl EnhancedChatMessage { /// Create a new enhanced chat message - pub fn new( - role: &str, - content: &str, - agent: &str, - session_id: &str, - ) -> Self { + pub fn new(role: &str, content: &str, agent: &str, session_id: &str) -> Self { Self { id: uuid::Uuid::new_v4().to_string(), role: role.to_string(), @@ -169,11 +164,7 @@ impl EnhancedChatMessage { impl ChatSession { /// Create a new chat session - pub fn new( - agent: &str, - start_time: i64, - first_message: &str, - ) -> Self { + pub fn new(agent: &str, start_time: i64, first_message: &str) -> Self { Self { id: uuid::Uuid::new_v4().to_string(), start_time, @@ -189,7 +180,7 @@ impl ChatSession { pub fn update_with_message(&mut self, message: &EnhancedChatMessage) { self.end_time = message.timestamp; self.message_count += 1; - + // Update branch if not set and message has branch info if self.branch.is_none() { self.branch = message.metadata.branch.clone(); @@ -197,7 +188,11 @@ impl ChatSession { } /// Check if this session should 
contain a new message based on timing and agent - pub fn should_include_message(&self, message: &EnhancedChatMessage, timeout_minutes: i64) -> bool { + pub fn should_include_message( + &self, + message: &EnhancedChatMessage, + timeout_minutes: i64, + ) -> bool { let time_gap_minutes = (message.timestamp - self.end_time) / 60; message.agent == self.agent && time_gap_minutes <= timeout_minutes } @@ -211,17 +206,17 @@ impl ChatSession { /// Extract file mentions from message content pub fn extract_file_mentions(content: &str) -> Vec { use regex::Regex; - + // Pattern to match common file paths // Matches patterns like: src/main.rs, ./config.json, /usr/local/bin/app, etc. let file_pattern = Regex::new(r"(?:^|\s|`)([^\s`]+\.[a-zA-Z0-9]{1,6})(?:\s|`|$)") .unwrap_or_else(|_| panic!("Invalid regex pattern")); - + let path_pattern = Regex::new(r"(?:^|\s|`)([a-zA-Z0-9_\-./]+/[a-zA-Z0-9_\-./]+)(?:\s|`|$)") .unwrap_or_else(|_| panic!("Invalid regex pattern")); - + let mut mentions = std::collections::HashSet::new(); - + // Extract file extensions for cap in file_pattern.captures_iter(content) { if let Some(file) = cap.get(1) { @@ -232,7 +227,7 @@ pub fn extract_file_mentions(content: &str) -> Vec { } } } - + // Extract path-like patterns for cap in path_pattern.captures_iter(content) { if let Some(path) = cap.get(1) { @@ -242,7 +237,7 @@ pub fn extract_file_mentions(content: &str) -> Vec { } } } - + mentions.into_iter().collect() } @@ -250,10 +245,16 @@ pub fn extract_file_mentions(content: &str) -> Vec { fn is_false_positive(text: &str) -> bool { // Common false positives let false_positives = [ - "http://", "https://", "localhost", "127.0.0.1", "0.0.0.0", - "package.json", "package-lock.json", "node_modules", + "http://", + "https://", + "localhost", + "127.0.0.1", + "0.0.0.0", + "package.json", + "package-lock.json", + "node_modules", ]; - + false_positives.iter().any(|&fp| text.contains(fp)) || text.len() > 100 || // Very long strings are likely not file paths 
text.starts_with("http") || @@ -266,7 +267,7 @@ fn generate_summary(content: &str) -> String { if content.len() <= 100 { return content.to_string(); } - + let truncated = &content[..100]; if let Some(last_space) = truncated.rfind(' ') { format!("{}...", &truncated[..last_space]) @@ -283,7 +284,7 @@ mod tests { fn test_extract_file_mentions() { let content = "Check the src/main.rs and tests/mod.rs files. Also look at ./config.json"; let mentions = extract_file_mentions(content); - + assert!(mentions.contains(&"src/main.rs".to_string())); assert!(mentions.contains(&"tests/mod.rs".to_string())); assert!(mentions.contains(&"./config.json".to_string())); @@ -293,7 +294,7 @@ mod tests { fn test_false_positive_filtering() { let content = "Visit https://example.com/api and http://localhost:3000/test"; let mentions = extract_file_mentions(content); - + // Should not extract URLs as file mentions assert!(!mentions.iter().any(|m| m.contains("http"))); } @@ -301,7 +302,7 @@ mod tests { #[test] fn test_session_grouping_logic() { let session = ChatSession::new("claude", 1000, "Test message"); - + let msg1 = EnhancedChatMessage { id: "1".to_string(), role: "user".to_string(), @@ -356,12 +357,15 @@ mod tests { }; let enhanced = EnhancedChatMessage::from_legacy(legacy, "session-123"); - + assert_eq!(enhanced.role, "user"); assert_eq!(enhanced.content, "Check src/main.rs please"); assert_eq!(enhanced.timestamp, 1234567890); assert_eq!(enhanced.agent, "claude"); assert_eq!(enhanced.metadata.session_id, "session-123"); - assert!(enhanced.metadata.file_mentions.contains(&"src/main.rs".to_string())); + assert!(enhanced + .metadata + .file_mentions + .contains(&"src/main.rs".to_string())); } -} \ No newline at end of file +} diff --git a/src-tauri/src/models/file.rs b/src-tauri/src/models/file.rs index 5ea4699..e6d38f1 100644 --- a/src-tauri/src/models/file.rs +++ b/src-tauri/src/models/file.rs @@ -13,4 +13,4 @@ pub struct FileInfo { pub struct DirectoryListing { pub current_directory: 
String, pub files: Vec, -} \ No newline at end of file +} diff --git a/src-tauri/src/models/llm.rs b/src-tauri/src/models/llm.rs index 4861e1d..af8f960 100644 --- a/src-tauri/src/models/llm.rs +++ b/src-tauri/src/models/llm.rs @@ -61,4 +61,4 @@ pub(crate) struct OpenAIModel { #[derive(Debug, Clone, Serialize, Deserialize)] pub(crate) struct OpenAIModelsResponse { pub data: Vec, -} \ No newline at end of file +} diff --git a/src-tauri/src/models/mod.rs b/src-tauri/src/models/mod.rs index 4207f66..2c6237a 100644 --- a/src-tauri/src/models/mod.rs +++ b/src-tauri/src/models/mod.rs @@ -1,20 +1,20 @@ // Model exports pub mod ai_agent; -pub mod project; -pub mod llm; +pub mod chat_history; pub mod file; -pub mod session; +pub mod llm; +pub mod project; pub mod prompt; +pub mod session; pub mod sub_agent; -pub mod chat_history; // Re-export all models for easy access pub use ai_agent::*; -pub use project::*; -pub use llm::*; pub use file::*; -pub use session::*; +pub use llm::*; +pub use project::*; pub use prompt::*; +pub use session::*; // Commented out until used // pub use sub_agent::*; -// pub use chat_history::*; \ No newline at end of file +// pub use chat_history::*; diff --git a/src-tauri/src/models/project.rs b/src-tauri/src/models/project.rs index 09da61b..80b137f 100644 --- a/src-tauri/src/models/project.rs +++ b/src-tauri/src/models/project.rs @@ -10,7 +10,7 @@ pub struct RecentProject { pub git_status: Option, } -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] pub struct ProjectsData { pub projects: Vec, } @@ -44,9 +44,15 @@ fn default_file_mentions_enabled() -> bool { true } -fn default_ui_theme() -> String { "auto".to_string() } -fn default_chat_send_shortcut() -> String { "mod+enter".to_string() } -fn default_show_welcome_recent_projects() -> bool { true } +fn default_ui_theme() -> String { + "auto".to_string() +} +fn default_chat_send_shortcut() -> String { + "mod+enter".to_string() +} +fn 
default_show_welcome_recent_projects() -> bool { + true +} #[derive(Debug, Clone, Serialize, Deserialize)] pub struct CodeSettings { @@ -56,12 +62,19 @@ pub struct CodeSettings { pub font_size: u16, // in px } -fn default_code_theme() -> String { "github".to_string() } -fn default_font_size() -> u16 { 14 } +fn default_code_theme() -> String { + "github".to_string() +} +fn default_font_size() -> u16 { + 14 +} impl Default for CodeSettings { fn default() -> Self { - Self { theme: default_code_theme(), font_size: default_font_size() } + Self { + theme: default_code_theme(), + font_size: default_font_size(), + } } } diff --git a/src-tauri/src/models/prompt.rs b/src-tauri/src/models/prompt.rs index 2f4a840..702b981 100644 --- a/src-tauri/src/models/prompt.rs +++ b/src-tauri/src/models/prompt.rs @@ -49,21 +49,21 @@ impl PromptTemplate { #[allow(dead_code)] pub fn render(&self, variables: &HashMap) -> String { let mut rendered = self.content.clone(); - + for (key, value) in variables { let placeholder = format!("{{{{{}}}}}", key); rendered = rendered.replace(&placeholder, value); } - + rendered } - + /// Extract all variable placeholders from the content #[allow(dead_code)] pub fn extract_variables(&self) -> Vec { let mut variables = Vec::new(); let content = &self.content; - + let mut start = 0; while let Some(open_pos) = content[start..].find("{{") { let open_pos = start + open_pos; @@ -78,20 +78,23 @@ impl PromptTemplate { break; } } - + variables } - + /// Validate that all required variables are provided #[allow(dead_code)] - pub fn validate_variables(&self, variables: &HashMap) -> Result<(), Vec> { + pub fn validate_variables( + &self, + variables: &HashMap, + ) -> Result<(), Vec> { let required_vars = self.extract_variables(); let missing_vars: Vec = required_vars .iter() .filter(|var| !variables.contains_key(*var)) .cloned() .collect(); - + if missing_vars.is_empty() { Ok(()) } else { @@ -106,13 +109,13 @@ impl PromptsConfig { pub fn get_prompt(&self, category: 
&str, key: &str) -> Option<&PromptTemplate> { self.prompts.get(category)?.get(key) } - + /// Get all prompts in a category #[allow(dead_code)] pub fn get_category_prompts(&self, category: &str) -> Option<&HashMap> { self.prompts.get(category) } - + /// Get all enabled categories #[allow(dead_code)] pub fn get_enabled_categories(&self) -> Vec<(&String, &PromptCategory)> { @@ -121,14 +124,17 @@ impl PromptsConfig { .filter(|(_, category)| category.enabled) .collect() } - + /// Add a new prompt to a category #[allow(dead_code)] pub fn add_prompt(&mut self, category: String, key: String, prompt: PromptTemplate) { - self.prompts.entry(category).or_default().insert(key, prompt); + self.prompts + .entry(category) + .or_default() + .insert(key, prompt); self.updated_at = chrono::Utc::now().timestamp(); } - + /// Remove a prompt from a category #[allow(dead_code)] pub fn remove_prompt(&mut self, category: &str, key: &str) -> Option { @@ -138,4 +144,4 @@ impl PromptsConfig { } removed } -} \ No newline at end of file +} diff --git a/src-tauri/src/models/session.rs b/src-tauri/src/models/session.rs index 6cddec1..bb2781b 100644 --- a/src-tauri/src/models/session.rs +++ b/src-tauri/src/models/session.rs @@ -11,9 +11,8 @@ pub struct CLISession { pub last_activity: i64, } - #[derive(Debug, Clone, Serialize, Deserialize)] pub struct SessionStatus { pub active_sessions: Vec, pub total_sessions: usize, -} \ No newline at end of file +} diff --git a/src-tauri/src/models/sub_agent.rs b/src-tauri/src/models/sub_agent.rs index 33e0770..7e41b63 100644 --- a/src-tauri/src/models/sub_agent.rs +++ b/src-tauri/src/models/sub_agent.rs @@ -16,4 +16,4 @@ pub struct SubAgentMetadata { pub description: String, pub color: Option, pub model: Option, -} \ No newline at end of file +} diff --git a/src-tauri/src/services/agent_status_service.rs b/src-tauri/src/services/agent_status_service.rs new file mode 100644 index 0000000..49aa70f --- /dev/null +++ 
b/src-tauri/src/services/agent_status_service.rs @@ -0,0 +1,350 @@ +use std::collections::HashMap; + +use async_trait::async_trait; +use once_cell::sync::Lazy; +use regex::Regex; +use serde_json::Value; +use tokio::process::Command; +use which::which; + +use crate::models::ai_agent::{AIAgent, AgentStatus}; + +const AGENT_DEFINITIONS: &[AgentDefinition] = &[ + AgentDefinition { + id: "claude", + command: "claude", + display_name: "Claude Code CLI", + package: Some("@anthropic-ai/claude-code"), + }, + AgentDefinition { + id: "codex", + command: "codex", + display_name: "Codex", + package: Some("@openai/codex"), + }, + AgentDefinition { + id: "gemini", + command: "gemini", + display_name: "Gemini", + package: Some("@google/gemini-cli"), + }, +]; + +#[derive(Debug, Clone)] +struct AgentDefinition { + id: &'static str, + command: &'static str, + display_name: &'static str, + package: Option<&'static str>, +} + +pub struct AgentStatusService { + probe: P, +} + +impl AgentStatusService { + pub fn new() -> Self { + Self { + probe: SystemAgentProbe, + } + } +} + +impl AgentStatusService

{ + pub fn with_probe(probe: P) -> Self { + Self { probe } + } + + pub async fn check_agents( + &self, + enabled: &HashMap, + ) -> Result { + let mut agents = Vec::new(); + + for definition in AGENT_DEFINITIONS { + let enabled_flag = *enabled.get(definition.id).unwrap_or(&true); + + if !enabled_flag { + agents.push(AIAgent { + name: definition.id.to_string(), + command: definition.command.to_string(), + display_name: definition.display_name.to_string(), + available: false, + enabled: false, + error_message: None, + installed_version: None, + latest_version: None, + upgrade_available: false, + }); + continue; + } + + let mut available = false; + let mut error_message = None; + let mut latest_version = None; + let mut upgrade_available = false; + let mut command_version = None; + let mut command_semver = None; + let mut package_version = None; + let mut package_semver = None; + let mut latest_semver = None; + + match self.probe.locate(definition.command).await { + Ok(true) => { + match self.probe.command_version(definition.command).await { + Ok(version) => { + available = true; + command_semver = + version.as_ref().and_then(|value| extract_semver(value)); + command_version = version; + } + Err(err) => { + error_message = Some(err); + } + } + + if let Some(package) = definition.package { + match self.probe.installed_package_version(package).await { + Ok(installed) => { + if let Some(ref v) = installed { + package_semver = extract_semver(v); + } + package_version = installed; + } + Err(err) => { + if error_message.is_none() { + error_message = Some(err); + } + } + } + + match self.probe.latest_package_version(package).await { + Ok(latest) => { + latest_semver = + latest.as_ref().and_then(|value| extract_semver(value)); + latest_version = latest; + } + Err(err) => { + if error_message.is_none() { + error_message = Some(err); + } + } + } + } + + if !available { + upgrade_available = true; + } + } + Ok(false) => { + error_message = + Some(format!("{} command not found in 
PATH", definition.command)); + upgrade_available = true; + } + Err(err) => { + error_message = Some(err); + upgrade_available = true; + } + } + + let installed_semver = package_semver.clone().or(command_semver.clone()); + + let installed_version = match (package_version.clone(), command_version.clone()) { + (Some(package), Some(command)) => { + if normalize_version_text(&package) == normalize_version_text(&command) + || command.contains(package.trim()) + { + Some(command.trim().to_string()) + } else { + Some(format!( + "{} (CLI reports {})", + package.trim(), + command.trim() + )) + } + } + (Some(package), None) => Some(package.trim().to_string()), + (None, Some(command)) => Some(command.trim().to_string()), + (None, None) => None, + }; + + if !upgrade_available { + if let (Some(installed), Some(latest)) = + (installed_semver.clone(), latest_semver.clone()) + { + if installed < latest { + upgrade_available = true; + } + } else if let (Some(installed), Some(latest)) = + (&installed_version, &latest_version) + { + if !installed.trim().is_empty() && !latest.trim().is_empty() { + upgrade_available = + normalize_version_text(installed) != normalize_version_text(latest); + } + } + } + + agents.push(AIAgent { + name: definition.id.to_string(), + command: definition.command.to_string(), + display_name: definition.display_name.to_string(), + available, + enabled: true, + error_message, + installed_version, + latest_version, + upgrade_available, + }); + } + + Ok(AgentStatus { agents }) + } +} + +fn extract_semver(text: &str) -> Option { + static SEMVER_RE: Lazy = + Lazy::new(|| Regex::new(r"(\d+\.\d+\.\d+)").expect("valid semver regex")); + + SEMVER_RE + .captures(text) + .and_then(|caps| caps.get(1)) + .and_then(|m| semver::Version::parse(m.as_str()).ok()) +} + +fn normalize_version_text(text: &str) -> String { + text.trim().to_lowercase() +} + +#[async_trait] +pub trait AgentProbe: Send + Sync { + async fn locate(&self, command: &str) -> Result; + async fn 
command_version(&self, command: &str) -> Result, String>; + async fn latest_package_version(&self, package: &str) -> Result, String>; + async fn installed_package_version(&self, package: &str) -> Result, String>; +} + +pub struct SystemAgentProbe; + +#[async_trait] +impl AgentProbe for SystemAgentProbe { + async fn locate(&self, command: &str) -> Result { + Ok(which(command).is_ok()) + } + + async fn command_version(&self, command: &str) -> Result, String> { + let output = Command::new(command) + .arg("--version") + .output() + .await + .map_err(|e| format!("Failed to execute {command} --version: {e}"))?; + + if output.status.success() { + let stdout = String::from_utf8_lossy(&output.stdout); + let first_line = stdout.lines().next().unwrap_or("").trim(); + if first_line.is_empty() { + Ok(None) + } else { + Ok(Some(first_line.to_string())) + } + } else { + let stderr = String::from_utf8_lossy(&output.stderr); + let stderr = stderr.trim(); + if stderr.is_empty() { + Err(format!( + "{command} --version exited with status {}", + output.status + )) + } else { + Err(stderr.to_string()) + } + } + } + + async fn latest_package_version(&self, package: &str) -> Result, String> { + if which("npm").is_err() { + return Ok(None); + } + + let output = Command::new("npm") + .args(["view", package, "version", "--json"]) + .output() + .await + .map_err(|e| format!("Failed to execute npm view {package} version: {e}"))?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + let stderr = stderr.trim(); + if stderr.contains("E404") { + return Ok(None); + } + return Err(if stderr.is_empty() { + format!( + "npm view {package} version exited with status {}", + output.status + ) + } else { + stderr.to_string() + }); + } + + let stdout = String::from_utf8_lossy(&output.stdout); + let stdout = stdout.trim(); + if stdout.is_empty() { + return Ok(None); + } + + let parsed: Result = serde_json::from_str(stdout); + match parsed { + Ok(Value::String(v)) => 
Ok(Some(v)), + Ok(Value::Array(arr)) => { + let last = arr + .iter() + .rev() + .find_map(|v| v.as_str().map(|s| s.to_string())); + Ok(last) + } + _ => Ok(Some(stdout.trim_matches('"').to_string())), + } + } + + async fn installed_package_version(&self, package: &str) -> Result, String> { + if which("npm").is_err() { + return Ok(None); + } + + let output = Command::new("npm") + .args(["list", "-g", package, "--json"]) + .output() + .await + .map_err(|e| format!("Failed to execute npm list {package}: {e}"))?; + + let status_code = output.status.code().unwrap_or_default(); + if !output.status.success() && status_code != 0 && status_code != 1 { + let stderr = String::from_utf8_lossy(&output.stderr); + let stderr = stderr.trim(); + return Err(if stderr.is_empty() { + format!("npm list {package} exited with status {}", output.status) + } else { + stderr.to_string() + }); + } + + let stdout = String::from_utf8_lossy(&output.stdout); + if stdout.trim().is_empty() { + return Ok(None); + } + + let parsed: Value = serde_json::from_str(stdout.trim()) + .map_err(|e| format!("Failed to parse npm list output for {package}: {e}"))?; + + let version = parsed + .get("dependencies") + .and_then(|deps| deps.get(package)) + .and_then(|pkg| pkg.get("version")) + .and_then(|v| v.as_str()) + .map(|s| s.to_string()); + + Ok(version) + } +} diff --git a/src-tauri/src/services/chat_history_service.rs b/src-tauri/src/services/chat_history_service.rs index 00367da..c380cc4 100644 --- a/src-tauri/src/services/chat_history_service.rs +++ b/src-tauri/src/services/chat_history_service.rs @@ -1,8 +1,8 @@ -use std::path::{Path, PathBuf}; +use crate::models::chat_history::*; +use chrono::Utc; use std::fs; +use std::path::{Path, PathBuf}; use tokio::fs as async_fs; -use chrono::Utc; -use crate::models::chat_history::*; const COMMANDER_DIR: &str = ".commander"; const CHAT_HISTORY_DIR: &str = "chat_history"; @@ -14,16 +14,17 @@ pub async fn ensure_commander_directory(project_path: &str) -> Result + 
messages: Vec, ) -> Result, String> { if messages.is_empty() { return Ok(Vec::new()); @@ -79,13 +80,14 @@ pub async fn save_chat_session( messages: &[EnhancedChatMessage], ) -> Result<(), String> { let chat_dir = ensure_commander_directory(project_path).await?; - + // Save session messages let session_file = chat_dir.join(format!("session_{}.json", session.id)); let messages_json = serde_json::to_string_pretty(messages) .map_err(|e| format!("Failed to serialize messages: {}", e))?; - - async_fs::write(session_file, messages_json).await + + async_fs::write(session_file, messages_json) + .await .map_err(|e| format!("Failed to write session file: {}", e))?; // Update sessions index @@ -95,15 +97,19 @@ pub async fn save_chat_session( } /// Update the sessions index with a new session -async fn update_sessions_index(project_path: &str, new_session: &ChatSession) -> Result<(), String> { +async fn update_sessions_index( + project_path: &str, + new_session: &ChatSession, +) -> Result<(), String> { let chat_dir = ensure_commander_directory(project_path).await?; let index_file = chat_dir.join(SESSIONS_INDEX_FILE); // Load existing index let mut index = if index_file.exists() { - let index_content = async_fs::read_to_string(&index_file).await + let index_content = async_fs::read_to_string(&index_file) + .await .map_err(|e| format!("Failed to read sessions index: {}", e))?; - + serde_json::from_str::(&index_content) .unwrap_or_else(|_| SessionsIndex::default()) } else { @@ -117,7 +123,9 @@ async fn update_sessions_index(project_path: &str, new_session: &ChatSession) -> index.sessions.push(new_session.clone()); // Sort by start time (newest first) - index.sessions.sort_by(|a, b| b.start_time.cmp(&a.start_time)); + index + .sessions + .sort_by(|a, b| b.start_time.cmp(&a.start_time)); // Update metadata index.last_updated = Utc::now().timestamp(); @@ -126,8 +134,9 @@ async fn update_sessions_index(project_path: &str, new_session: &ChatSession) -> // Save updated index let 
index_json = serde_json::to_string_pretty(&index) .map_err(|e| format!("Failed to serialize sessions index: {}", e))?; - - async_fs::write(index_file, index_json).await + + async_fs::write(index_file, index_json) + .await .map_err(|e| format!("Failed to write sessions index: {}", e))?; Ok(()) @@ -146,9 +155,10 @@ pub async fn load_chat_sessions( return Ok(Vec::new()); } - let index_content = async_fs::read_to_string(&index_file).await + let index_content = async_fs::read_to_string(&index_file) + .await .map_err(|e| format!("Failed to read sessions index: {}", e))?; - + let index: SessionsIndex = serde_json::from_str(&index_content) .map_err(|e| format!("Failed to parse sessions index: {}", e))?; @@ -179,9 +189,10 @@ pub async fn load_session_messages( return Err(format!("Session file not found: {}", session_id)); } - let session_content = async_fs::read_to_string(&session_file).await + let session_content = async_fs::read_to_string(&session_file) + .await .map_err(|e| format!("Failed to read session file: {}", e))?; - + let messages: Vec = serde_json::from_str(&session_content) .map_err(|e| format!("Failed to parse session messages: {}", e))?; @@ -191,22 +202,24 @@ pub async fn load_session_messages( /// Delete a chat session pub async fn delete_chat_session(project_path: &str, session_id: &str) -> Result<(), String> { let chat_dir = ensure_commander_directory(project_path).await?; - + // Delete session file let session_file = chat_dir.join(format!("session_{}.json", session_id)); if session_file.exists() { - async_fs::remove_file(session_file).await + async_fs::remove_file(session_file) + .await .map_err(|e| format!("Failed to delete session file: {}", e))?; } // Remove from sessions index let index_file = chat_dir.join(SESSIONS_INDEX_FILE); if index_file.exists() { - let index_content = async_fs::read_to_string(&index_file).await + let index_content = async_fs::read_to_string(&index_file) + .await .map_err(|e| format!("Failed to read sessions index: {}", e))?; - 
- let mut index: SessionsIndex = serde_json::from_str(&index_content) - .unwrap_or_else(|_| SessionsIndex::default()); + + let mut index: SessionsIndex = + serde_json::from_str(&index_content).unwrap_or_else(|_| SessionsIndex::default()); // Remove session from index index.sessions.retain(|s| s.id != session_id); @@ -215,8 +228,9 @@ pub async fn delete_chat_session(project_path: &str, session_id: &str) -> Result // Save updated index let index_json = serde_json::to_string_pretty(&index) .map_err(|e| format!("Failed to serialize sessions index: {}", e))?; - - async_fs::write(index_file, index_json).await + + async_fs::write(index_file, index_json) + .await .map_err(|e| format!("Failed to write sessions index: {}", e))?; } @@ -260,7 +274,7 @@ pub async fn migrate_legacy_chat_data( /// Extract file mentions from content using regex pub fn extract_file_mentions(content: &str) -> Vec { use regex::Regex; - + // More comprehensive regex patterns for file detection // Note: The Rust `regex` crate does not support lookarounds, so we // capture the filename/path in group 1 and match trailing punctuation @@ -320,22 +334,38 @@ fn is_likely_file_path(text: &str) -> bool { /// Check for common filename patterns fn is_common_filename(text: &str) -> bool { let common_files = [ - "Makefile", "Dockerfile", "README", "LICENSE", "CHANGELOG", - "Cargo.toml", "package.json", "pom.xml", "build.gradle", + "Makefile", + "Dockerfile", + "README", + "LICENSE", + "CHANGELOG", + "Cargo.toml", + "package.json", + "pom.xml", + "build.gradle", ]; - - common_files.iter().any(|&file| text.eq_ignore_ascii_case(file)) + + common_files + .iter() + .any(|&file| text.eq_ignore_ascii_case(file)) } /// Check if text is likely a false positive fn is_false_positive(text: &str) -> bool { let false_positives = [ - "localhost", "127.0.0.1", "0.0.0.0", "example.com", - "www.", ".com", ".org", ".net", ".io", + "localhost", + "127.0.0.1", + "0.0.0.0", + "example.com", + "www.", + ".com", + ".org", + ".net", + 
".io", ]; - - false_positives.iter().any(|&fp| text.contains(fp)) || - text.chars().all(|c| c.is_ascii_digit() || c == '.') // IP addresses + + false_positives.iter().any(|&fp| text.contains(fp)) + || text.chars().all(|c| c.is_ascii_digit() || c == '.') // IP addresses } /// Get chat history statistics @@ -344,12 +374,13 @@ pub async fn get_chat_history_stats(project_path: &str) -> Result Result Result date_range = Some((session.start_time, session.end_time)), Some((min, max)) => { - date_range = Some(( - min.min(session.start_time), - max.max(session.end_time), - )); + date_range = Some((min.min(session.start_time), max.max(session.end_time))); } } } @@ -412,10 +440,11 @@ pub async fn export_chat_history( request: ExportRequest, ) -> Result { let sessions = load_chat_sessions(project_path, None, None).await?; - + // Filter sessions if specific ones requested let sessions_to_export = if let Some(ref session_ids) = request.sessions { - sessions.into_iter() + sessions + .into_iter() .filter(|s| session_ids.contains(&s.id)) .collect() } else { @@ -423,7 +452,9 @@ pub async fn export_chat_history( }; match request.format { - ExportFormat::Json => export_as_json(&sessions_to_export, project_path, request.include_metadata).await, + ExportFormat::Json => { + export_as_json(&sessions_to_export, project_path, request.include_metadata).await + } ExportFormat::Markdown => export_as_markdown(&sessions_to_export, project_path).await, ExportFormat::Html => export_as_html(&sessions_to_export, project_path).await, ExportFormat::Csv => export_as_csv(&sessions_to_export, project_path).await, @@ -436,15 +467,19 @@ async fn export_as_json( include_metadata: bool, ) -> Result { let mut export_data = serde_json::Map::new(); - export_data.insert("export_date".to_string(), serde_json::Value::String( - chrono::Utc::now().to_rfc3339() - )); - export_data.insert("project_path".to_string(), serde_json::Value::String(project_path.to_string())); + export_data.insert( + 
"export_date".to_string(), + serde_json::Value::String(chrono::Utc::now().to_rfc3339()), + ); + export_data.insert( + "project_path".to_string(), + serde_json::Value::String(project_path.to_string()), + ); let mut sessions_data = Vec::new(); for session in sessions { let messages = load_session_messages(project_path, &session.id).await?; - + let session_data = if include_metadata { serde_json::json!({ "session": session, @@ -452,13 +487,18 @@ async fn export_as_json( }) } else { // Simplified format without metadata - let simple_messages: Vec<_> = messages.iter().map(|m| serde_json::json!({ - "role": m.role, - "content": m.content, - "timestamp": m.timestamp, - "agent": m.agent - })).collect(); - + let simple_messages: Vec<_> = messages + .iter() + .map(|m| { + serde_json::json!({ + "role": m.role, + "content": m.content, + "timestamp": m.timestamp, + "agent": m.agent + }) + }) + .collect(); + serde_json::json!({ "session_id": session.id, "agent": session.agent, @@ -467,30 +507,42 @@ async fn export_as_json( "messages": simple_messages }) }; - + sessions_data.push(session_data); } - export_data.insert("sessions".to_string(), serde_json::Value::Array(sessions_data)); - + export_data.insert( + "sessions".to_string(), + serde_json::Value::Array(sessions_data), + ); + serde_json::to_string_pretty(&export_data) .map_err(|e| format!("Failed to serialize export data: {}", e)) } -async fn export_as_markdown(sessions: &[ChatSession], project_path: &str) -> Result { +async fn export_as_markdown( + sessions: &[ChatSession], + project_path: &str, +) -> Result { let mut markdown = String::new(); - + markdown.push_str(&format!("# Chat History Export\n\n")); markdown.push_str(&format!("**Project:** {}\n", project_path)); - markdown.push_str(&format!("**Export Date:** {}\n\n", chrono::Utc::now().to_rfc3339())); + markdown.push_str(&format!( + "**Export Date:** {}\n\n", + chrono::Utc::now().to_rfc3339() + )); for session in sessions { let messages = 
load_session_messages(project_path, &session.id).await?; let session_date = chrono::DateTime::from_timestamp(session.start_time, 0) .unwrap_or_default() .format("%Y-%m-%d %H:%M:%S"); - - markdown.push_str(&format!("## Session: {} ({})\n\n", session.summary, session_date)); + + markdown.push_str(&format!( + "## Session: {} ({})\n\n", + session.summary, session_date + )); markdown.push_str(&format!("**Agent:** {}\n", session.agent)); if let Some(ref branch) = session.branch { markdown.push_str(&format!("**Branch:** {}\n", branch)); @@ -503,7 +555,7 @@ async fn export_as_markdown(sessions: &[ChatSession], project_path: &str) -> Res "assistant" => "🤖 **Assistant**", _ => &message.role, }; - + markdown.push_str(&format!("{}\n\n", role_display)); markdown.push_str(&format!("{}\n\n", message.content)); markdown.push_str("---\n\n"); @@ -532,10 +584,10 @@ mod tests { fn test_file_mention_extraction() { let content = "Check src/main.rs and ./config.json, also look at Makefile"; let mentions = extract_file_mentions(content); - + println!("Debug: Extracted mentions: {:?}", mentions); println!("Debug: Looking for: src/main.rs, ./config.json, Makefile"); - + assert!(mentions.contains(&"src/main.rs".to_string())); assert!(mentions.contains(&"./config.json".to_string())); assert!(mentions.contains(&"Makefile".to_string())); @@ -545,7 +597,7 @@ mod tests { fn test_false_positive_filtering() { let content = "Visit https://example.com and 192.168.1.1"; let mentions = extract_file_mentions(content); - + assert!(!mentions.iter().any(|m| m.contains("https"))); assert!(!mentions.iter().any(|m| m.contains("192.168"))); } diff --git a/src-tauri/src/services/cli_command_builder.rs b/src-tauri/src/services/cli_command_builder.rs new file mode 100644 index 0000000..2aead5c --- /dev/null +++ b/src-tauri/src/services/cli_command_builder.rs @@ -0,0 +1,39 @@ +use crate::models::ai_agent::AgentSettings; +use crate::services::execution_mode_service::{codex_flags_for_mode, ExecutionMode}; + +/// 
Build command-line arguments for invoking the Codex CLI. +/// +/// The resulting vector does **not** include the `codex` program name itself – +/// callers should prepend it when spawning the process. The helper keeps +/// responsibility scoped to pure argument construction so it can be reused and +/// unit-tested in isolation. +pub fn build_codex_command_args( + message: &str, + execution_mode: Option, + unsafe_full: bool, + settings: Option<&AgentSettings>, +) -> Vec { + let mut args = vec!["exec".to_string()]; + + if !message.trim().is_empty() { + args.push(message.to_string()); + } + + if let Some(agent_settings) = settings { + if let Some(model) = agent_settings.model.as_ref() { + if !model.is_empty() { + args.push("--model".to_string()); + args.push(model.clone()); + } + } + } + + if let Some(mode) = execution_mode { + let bypass = unsafe_full && matches!(mode, ExecutionMode::Full); + args.extend(codex_flags_for_mode(mode, bypass)); + } + + args.push("--skip-git-repo-check".to_string()); + + args +} diff --git a/src-tauri/src/services/cli_output_service.rs b/src-tauri/src/services/cli_output_service.rs new file mode 100644 index 0000000..20d6f94 --- /dev/null +++ b/src-tauri/src/services/cli_output_service.rs @@ -0,0 +1,114 @@ +pub fn sanitize_cli_output_line(agent: &str, line: &str) -> Option { + if !agent.eq_ignore_ascii_case("codex") { + return Some(line.to_string()); + } + + let trimmed = line.trim(); + + // Known Node.js warnings emitted by @openai/codex when using older dependencies. + // Only drop lines that match the warning text exactly so we don't swallow + // legitimate agent output that happens to include similar words. 
+ let is_known_warning = trimmed + == "(Use `node --trace-warnings ...` to show where the warning was created)" + || (trimmed.starts_with("(node:") + && trimmed.ends_with("inside circular dependency") + && (trimmed.contains("Warning: Accessing non-existent property 'lineno'") + || trimmed.contains("Warning: Accessing non-existent property 'filename'"))); + + if is_known_warning { + return None; + } + + Some(line.to_string()) +} + +/// Incrementally splits Codex CLI output into discrete JSON messages. +/// +/// Codex streams often emit carriage returns (\r) instead of newlines which causes +/// standard line-based readers to block until the command finishes. This accumulator +/// collects raw chunks and emits complete payloads whenever it sees `\r`, `\n` or +/// `\r\n`, while buffering partial fragments for the next chunk. +#[derive(Default)] +pub struct CodexStreamAccumulator { + buffer: String, +} + +impl CodexStreamAccumulator { + pub fn new() -> Self { + Self::default() + } + + pub fn push_chunk(&mut self, chunk: &str) -> Vec { + if chunk.is_empty() { + return Vec::new(); + } + + self.buffer.push_str(chunk); + + let mut results = Vec::new(); + let mut start = 0usize; + let bytes = self.buffer.as_bytes(); + let mut idx = 0usize; + + while idx < bytes.len() { + match bytes[idx] { + b'\r' | b'\n' => { + if start < idx { + self.process_segment(&self.buffer[start..idx], &mut results); + } + + // Skip consecutive separators so \r\n or multiple \r don't produce empty chunks + idx += 1; + while idx < bytes.len() && (bytes[idx] == b'\r' || bytes[idx] == b'\n') { + idx += 1; + } + + start = idx; + } + _ => { + idx += 1; + } + } + } + + if start > 0 { + // Drop everything up to the last processed separator, keep remainder buffered + self.buffer.drain(..start); + } + + results + } + + pub fn flush(&mut self) -> Option { + if self.buffer.is_empty() { + None + } else { + let mut results = Vec::new(); + let remaining: String = self.buffer.drain(..).collect(); + 
self.process_segment(&remaining, &mut results); + results.pop() + } + } + + fn process_segment(&self, segment: &str, results: &mut Vec) { + let trimmed = segment.trim(); + if trimmed.is_empty() { + return; + } + + if let Some(rest) = trimmed.strip_prefix("data:") { + let data = rest.trim(); + if data.is_empty() || data.eq_ignore_ascii_case("[DONE]") { + return; + } + results.push(data.to_string()); + return; + } + + if trimmed.starts_with("event:") || trimmed.starts_with("id:") { + return; + } + + results.push(trimmed.to_string()); + } +} diff --git a/src-tauri/src/services/codex_sdk_service.rs b/src-tauri/src/services/codex_sdk_service.rs new file mode 100644 index 0000000..851444f --- /dev/null +++ b/src-tauri/src/services/codex_sdk_service.rs @@ -0,0 +1,44 @@ +use crate::services::execution_mode_service::ExecutionMode; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct CodexThreadPreferences { + pub sandbox_mode: Option, + pub skip_git_repo_check: bool, +} + +impl Default for CodexThreadPreferences { + fn default() -> Self { + Self { + sandbox_mode: Some("workspace-write".to_string()), + skip_git_repo_check: true, + } + } +} + +pub fn build_codex_thread_prefs( + execution_mode: Option, + dangerous_bypass: bool, +) -> CodexThreadPreferences { + let mut prefs = CodexThreadPreferences::default(); + + match execution_mode { + Some(ExecutionMode::Chat) => { + prefs.sandbox_mode = Some("read-only".to_string()); + } + Some(ExecutionMode::Collab) => { + prefs.sandbox_mode = Some("workspace-write".to_string()); + } + Some(ExecutionMode::Full) => { + if dangerous_bypass { + prefs.sandbox_mode = Some("danger-full-access".to_string()); + } else { + prefs.sandbox_mode = Some("workspace-write".to_string()); + } + } + None => { + prefs.sandbox_mode = Some("workspace-write".to_string()); + } + } + + prefs +} diff --git a/src-tauri/src/services/execution_mode_service.rs b/src-tauri/src/services/execution_mode_service.rs index 6e0101d..a090d94 100644 --- 
a/src-tauri/src/services/execution_mode_service.rs +++ b/src-tauri/src/services/execution_mode_service.rs @@ -1,8 +1,8 @@ #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum ExecutionMode { - Chat, // read-only, no writes - Collab, // asks for approval - Full, // auto execute (low friction) + Chat, // read-only, no writes + Collab, // asks for approval + Full, // auto execute (low friction) } impl ExecutionMode { @@ -20,14 +20,8 @@ impl ExecutionMode { /// When `unsafe_full` is true and `mode` is Full, we use the fully unsandboxed flag. pub fn codex_flags_for_mode(mode: ExecutionMode, unsafe_full: bool) -> Vec { match mode { - ExecutionMode::Chat => vec![ - "--sandbox".into(), "read-only".into(), - "--ask-for-approval".into(), "never".into(), - ], - ExecutionMode::Collab => vec![ - "--sandbox".into(), "workspace-write".into(), - "--ask-for-approval".into(), "on-request".into(), - ], + ExecutionMode::Chat => vec!["--sandbox".into(), "read-only".into()], + ExecutionMode::Collab => vec!["--sandbox".into(), "workspace-write".into()], ExecutionMode::Full => { if unsafe_full { vec!["--dangerously-bypass-approvals-and-sandbox".into()] diff --git a/src-tauri/src/services/file_service.rs b/src-tauri/src/services/file_service.rs index b86e95f..57ff723 100644 --- a/src-tauri/src/services/file_service.rs +++ b/src-tauri/src/services/file_service.rs @@ -11,4 +11,3 @@ pub fn read_file_content(path: &str) -> Result { } fs::read_to_string(p).map_err(|e| format!("Failed to read file {}: {}", path, e)) } - diff --git a/src-tauri/src/services/git_service.rs b/src-tauri/src/services/git_service.rs index 586c50d..a7eb8b0 100644 --- a/src-tauri/src/services/git_service.rs +++ b/src-tauri/src/services/git_service.rs @@ -1,6 +1,6 @@ +use std::fs; use std::path::{Path, PathBuf}; use std::process::Command; -use std::fs; /// Check if a directory is a valid Git repository by looking for .git folder pub fn is_valid_git_repository(project_path: &str) -> bool { @@ -64,15 +64,23 @@ pub fn 
find_git_root(current_path: &str) -> Option { if dotgit.is_file() { // Worktree: .git is a file with a `gitdir:` pointer. if let Ok(content) = fs::read_to_string(&dotgit) { - if let Some(gitdir_line) = content.lines().find(|line| line.starts_with("gitdir:")) { + if let Some(gitdir_line) = content.lines().find(|line| line.starts_with("gitdir:")) + { let gitdir = gitdir_line.trim_start_matches("gitdir:").trim(); let gitdir_path: PathBuf = { let p = Path::new(gitdir); - if p.is_absolute() { p.to_path_buf() } else { ancestor.join(p) } + if p.is_absolute() { + p.to_path_buf() + } else { + ancestor.join(p) + } }; // Find the main repo's .git directory by walking up from gitdir - if let Some(main_git_dir) = gitdir_path.ancestors().find(|p| p.file_name().map(|n| n == ".git").unwrap_or(false)) { + if let Some(main_git_dir) = gitdir_path + .ancestors() + .find(|p| p.file_name().map(|n| n == ".git").unwrap_or(false)) + { if let Some(repo_root) = main_git_dir.parent() { return Some(repo_root.to_string_lossy().into_owned()); } @@ -89,16 +97,16 @@ pub fn find_git_root(current_path: &str) -> Option { /// Returns the main repository root path if found, current path if it's a valid repo pub fn resolve_git_project_path(current_path: &str) -> Option { let path = Path::new(current_path); - + // First check if current path has git if !path.join(".git").exists() { // Not a git repo, return None return None; } - + // Check if .git is a file (worktree) or directory (regular repo) let git_path = path.join(".git"); - + if git_path.is_file() { // This is likely a worktree - read the .git file to find main repo if let Ok(content) = fs::read_to_string(&git_path) { @@ -119,6 +127,6 @@ pub fn resolve_git_project_path(current_path: &str) -> Option { // Regular git repository return Some(current_path.to_string()); } - + None } diff --git a/src-tauri/src/services/llm_service.rs b/src-tauri/src/services/llm_service.rs index b8307e2..7335502 100644 --- a/src-tauri/src/services/llm_service.rs +++ 
b/src-tauri/src/services/llm_service.rs @@ -5,7 +5,7 @@ use tauri_plugin_store::StoreExt; /// Get default LLM settings pub fn get_default_llm_settings() -> LLMSettings { let mut providers = HashMap::new(); - + // Default OpenRouter provider let openrouter_provider = LLMProvider { id: "openrouter".to_string(), @@ -66,7 +66,7 @@ pub fn get_default_llm_settings() -> LLMSettings { selected_model: None, }; providers.insert("openai".to_string(), openai_provider); - + LLMSettings { active_provider: "openrouter".to_string(), providers, @@ -77,7 +77,7 @@ pub fn get_default_llm_settings() -> LLMSettings { /// Fetch available models from OpenRouter API pub async fn fetch_openrouter_models(api_key: &str) -> Result, String> { let client = reqwest::Client::new(); - + let response = client .get("https://openrouter.ai/api/v1/models") .header("Authorization", format!("Bearer {}", api_key)) @@ -95,10 +95,12 @@ pub async fn fetch_openrouter_models(api_key: &str) -> Result, Str .await .map_err(|e| format!("Failed to parse response: {}", e))?; - let models = openrouter_response.data + let models = openrouter_response + .data .into_iter() .map(|model| { - let (input_cost, output_cost) = model.pricing + let (input_cost, output_cost) = model + .pricing .as_ref() .map(|p| { let input = p.prompt.as_ref().and_then(|s| s.parse::().ok()); @@ -124,7 +126,7 @@ pub async fn fetch_openrouter_models(api_key: &str) -> Result, Str /// Fetch available models from OpenAI API pub async fn fetch_openai_models(api_key: &str) -> Result, String> { let client = reqwest::Client::new(); - + let response = client .get("https://api.openai.com/v1/models") .header("Authorization", format!("Bearer {}", api_key)) @@ -145,14 +147,17 @@ pub async fn fetch_openai_models(api_key: &str) -> Result, String> let mut models = Vec::new(); for model in openai_response.data { // Filter for GPT models and other important ones - if model.id.contains("gpt") || model.id.contains("davinci") || model.id.contains("text-embedding") { + 
if model.id.contains("gpt") + || model.id.contains("davinci") + || model.id.contains("text-embedding") + { models.push(LLMModel { id: model.id.clone(), name: model.id.clone(), description: Some(format!("OpenAI model owned by {}", model.owned_by)), context_length: None, // OpenAI doesn't provide this in the models endpoint - input_cost: None, // Would need to be manually configured - output_cost: None, // Would need to be manually configured + input_cost: None, // Would need to be manually configured + output_cost: None, // Would need to be manually configured }); } } @@ -164,22 +169,31 @@ pub async fn fetch_openai_models(api_key: &str) -> Result, String> } /// Save LLM settings to store -pub async fn save_llm_settings(app: &tauri::AppHandle, settings: &LLMSettings) -> Result<(), String> { - let store = app.store("settings.json").map_err(|e| format!("Failed to access store: {}", e))?; - +pub async fn save_llm_settings( + app: &tauri::AppHandle, + settings: &LLMSettings, +) -> Result<(), String> { + let store = app + .store("settings.json") + .map_err(|e| format!("Failed to access store: {}", e))?; + let serialized = serde_json::to_value(settings) .map_err(|e| format!("Failed to serialize LLM settings: {}", e))?; - + store.set("llm_settings", serialized); - store.save().map_err(|e| format!("Failed to save store: {}", e))?; + store + .save() + .map_err(|e| format!("Failed to save store: {}", e))?; Ok(()) } /// Load LLM settings from store pub async fn load_llm_settings(app: &tauri::AppHandle) -> Result { - let store = app.store("settings.json").map_err(|e| format!("Failed to access store: {}", e))?; - + let store = app + .store("settings.json") + .map_err(|e| format!("Failed to access store: {}", e))?; + let settings = store .get("llm_settings") .and_then(|v| serde_json::from_value(v.clone()).ok()) @@ -187,4 +201,3 @@ pub async fn load_llm_settings(app: &tauri::AppHandle) -> Result project_path.exists() } - /// Add a project to the recent projects list -pub async fn 
add_project_to_recent_projects(app: &tauri::AppHandle, project_path: String) -> Result<(), String> { +pub async fn add_project_to_recent_projects( + app: &tauri::AppHandle, + project_path: String, +) -> Result<(), String> { // Align with commands recent projects store: keep "projects" as an ARRAY of RecentProject let store = app .store("recent-projects.json") @@ -77,7 +79,9 @@ pub async fn add_project_to_recent_projects(app: &tauri::AppHandle, project_path let serialized = serde_json::to_value(&existing) .map_err(|e| format!("Failed to serialize projects: {}", e))?; store.set("projects", serialized); - store.save().map_err(|e| format!("Failed to save store: {}", e))?; + store + .save() + .map_err(|e| format!("Failed to save store: {}", e))?; Ok(()) } @@ -148,7 +152,9 @@ pub async fn open_existing_project( let serialized = serde_json::to_value(&updated) .map_err(|e| format!("Failed to serialize projects: {}", e))?; store.set("projects", serialized); - store.save().map_err(|e| format!("Failed to save store: {}", e))?; + store + .save() + .map_err(|e| format!("Failed to save store: {}", e))?; // Set active working directory std::env::set_current_dir(&project_path) diff --git a/src-tauri/src/services/prompt_service.rs b/src-tauri/src/services/prompt_service.rs index ba7749e..4028d5b 100644 --- a/src-tauri/src/services/prompt_service.rs +++ b/src-tauri/src/services/prompt_service.rs @@ -6,7 +6,7 @@ use tauri_plugin_store::StoreExt; pub fn get_default_prompts() -> PromptsConfig { let mut categories = HashMap::new(); let mut prompts = HashMap::new(); - + // Plan Mode Category categories.insert( "plan_mode".to_string(), @@ -14,9 +14,9 @@ pub fn get_default_prompts() -> PromptsConfig { name: "Plan Mode".to_string(), description: "Prompts for plan generation and execution".to_string(), enabled: true, - } + }, ); - + let plan_prompts = HashMap::from([ ("system".to_string(), PromptTemplate { name: "Plan Generation System Prompt".to_string(), @@ -81,7 +81,7 @@ Please create a 
detailed execution plan for this request."#.to_string(), }), ]); prompts.insert("plan_mode".to_string(), plan_prompts); - + // Agent Execution Category categories.insert( "agent_execution".to_string(), @@ -89,9 +89,9 @@ Please create a detailed execution plan for this request."#.to_string(), name: "Agent Execution".to_string(), description: "Prompts used when executing tasks with AI agents".to_string(), enabled: true, - } + }, ); - + let execution_prompts = HashMap::from([ ("claude_system".to_string(), PromptTemplate { name: "Claude Code CLI System Prompt".to_string(), @@ -184,7 +184,7 @@ When assisting with development: }), ]); prompts.insert("agent_execution".to_string(), execution_prompts); - + // Code Analysis Category categories.insert( "code_analysis".to_string(), @@ -192,14 +192,16 @@ When assisting with development: name: "Code Analysis".to_string(), description: "Prompts for code review and analysis tasks".to_string(), enabled: true, - } + }, ); - + let analysis_prompts = HashMap::from([ - ("review_checklist".to_string(), PromptTemplate { - name: "Code Review Checklist".to_string(), - description: "Comprehensive code review prompt template".to_string(), - content: r#"Please review the following code and provide feedback on: + ( + "review_checklist".to_string(), + PromptTemplate { + name: "Code Review Checklist".to_string(), + description: "Comprehensive code review prompt template".to_string(), + content: r#"Please review the following code and provide feedback on: **Code Quality:** - [ ] Code readability and maintainability @@ -228,16 +230,20 @@ When assisting with development: Code to review: {{code_content}} -Please provide specific, actionable feedback with examples where appropriate."#.to_string(), - category: "code_analysis".to_string(), - variables: vec!["code_content".to_string()], - created_at: chrono::Utc::now().timestamp(), - updated_at: chrono::Utc::now().timestamp(), - }), - ("performance_analysis".to_string(), PromptTemplate { - name: 
"Performance Analysis Template".to_string(), - description: "Template for analyzing code performance".to_string(), - content: r#"Analyze the performance characteristics of this code: +Please provide specific, actionable feedback with examples where appropriate."# + .to_string(), + category: "code_analysis".to_string(), + variables: vec!["code_content".to_string()], + created_at: chrono::Utc::now().timestamp(), + updated_at: chrono::Utc::now().timestamp(), + }, + ), + ( + "performance_analysis".to_string(), + PromptTemplate { + name: "Performance Analysis Template".to_string(), + description: "Template for analyzing code performance".to_string(), + content: r#"Analyze the performance characteristics of this code: **Performance Analysis for:** {{component_name}} @@ -260,15 +266,17 @@ Please provide specific, actionable feedback with examples where appropriate."#. **Trade-offs:** - Discuss performance vs. readability trade-offs - Memory vs. speed considerations -- Maintenance implications of optimizations"#.to_string(), - category: "code_analysis".to_string(), - variables: vec!["component_name".to_string(), "code_content".to_string()], - created_at: chrono::Utc::now().timestamp(), - updated_at: chrono::Utc::now().timestamp(), - }), +- Maintenance implications of optimizations"# + .to_string(), + category: "code_analysis".to_string(), + variables: vec!["component_name".to_string(), "code_content".to_string()], + created_at: chrono::Utc::now().timestamp(), + updated_at: chrono::Utc::now().timestamp(), + }, + ), ]); prompts.insert("code_analysis".to_string(), analysis_prompts); - + PromptsConfig { categories, prompts, @@ -279,9 +287,10 @@ Please provide specific, actionable feedback with examples where appropriate."#. 
/// Load prompts from store pub async fn load_prompts(app: &tauri::AppHandle) -> Result { - let store = app.store("prompts.json") + let store = app + .store("prompts.json") .map_err(|e| format!("Failed to access prompts store: {}", e))?; - + let prompts = store .get("prompts_config") .and_then(|v| serde_json::from_value(v.clone()).ok()) @@ -292,32 +301,35 @@ pub async fn load_prompts(app: &tauri::AppHandle) -> Result Result<(), String> { - let store = app.store("prompts.json") + let store = app + .store("prompts.json") .map_err(|e| format!("Failed to access prompts store: {}", e))?; - + let serialized = serde_json::to_value(prompts) .map_err(|e| format!("Failed to serialize prompts config: {}", e))?; - + store.set("prompts_config", serialized); - store.save().map_err(|e| format!("Failed to save prompts store: {}", e))?; + store + .save() + .map_err(|e| format!("Failed to save prompts store: {}", e))?; Ok(()) } /// Update a specific prompt pub async fn update_prompt( - app: &tauri::AppHandle, - category: &str, - key: &str, - prompt: &PromptTemplate + app: &tauri::AppHandle, + category: &str, + key: &str, + prompt: &PromptTemplate, ) -> Result<(), String> { let mut config = load_prompts(app).await?; - + if let Some(category_prompts) = config.prompts.get_mut(category) { let mut updated_prompt = prompt.clone(); updated_prompt.updated_at = chrono::Utc::now().timestamp(); category_prompts.insert(key.to_string(), updated_prompt); - + config.updated_at = chrono::Utc::now().timestamp(); save_prompts(app, &config).await?; Ok(()) @@ -330,17 +342,20 @@ pub async fn update_prompt( pub async fn delete_prompt( app: &tauri::AppHandle, category: &str, - key: &str + key: &str, ) -> Result<(), String> { let mut config = load_prompts(app).await?; - + if let Some(category_prompts) = config.prompts.get_mut(category) { if category_prompts.remove(key).is_some() { config.updated_at = chrono::Utc::now().timestamp(); save_prompts(app, &config).await?; Ok(()) } else { - Err(format!("Prompt 
'{}' not found in category '{}'", key, category)) + Err(format!( + "Prompt '{}' not found in category '{}'", + key, category + )) } } else { Err(format!("Category '{}' not found", category)) @@ -351,20 +366,20 @@ pub async fn delete_prompt( pub async fn create_category( app: &tauri::AppHandle, category: &str, - description: &str + description: &str, ) -> Result<(), String> { let mut config = load_prompts(app).await?; - + let new_category = PromptCategory { name: category.to_string(), description: description.to_string(), enabled: true, }; - + config.categories.insert(category.to_string(), new_category); config.prompts.insert(category.to_string(), HashMap::new()); config.updated_at = chrono::Utc::now().timestamp(); - + save_prompts(app, &config).await?; Ok(()) -} \ No newline at end of file +} diff --git a/src-tauri/src/services/sub_agent_service.rs b/src-tauri/src/services/sub_agent_service.rs index 7bd73b5..5416757 100644 --- a/src-tauri/src/services/sub_agent_service.rs +++ b/src-tauri/src/services/sub_agent_service.rs @@ -1,7 +1,7 @@ +use crate::models::sub_agent::{SubAgent, SubAgentMetadata}; use std::collections::HashMap; use std::fs; use std::path::{Path, PathBuf}; -use crate::models::sub_agent::{SubAgent, SubAgentMetadata}; pub struct SubAgentService; @@ -9,7 +9,7 @@ impl SubAgentService { /// Load all sub-agents from the user's home directory pub async fn load_all_sub_agents() -> Result, String> { let mut all_agents = Vec::new(); - + // Define the possible agent directories for different CLI tools let agent_paths = vec![ "~/.claude/agents", @@ -19,22 +19,26 @@ impl SubAgentService { "~/codex/agents", "~/gemini/agents", ]; - + for path_str in agent_paths { let expanded_path = Self::expand_tilde(path_str)?; if let Ok(agents) = Self::load_agents_from_directory(&expanded_path).await { all_agents.extend(agents); } } - + Ok(all_agents) } /// Save the full content to the given agent file path pub fn save_agent_file(file_path: &Path, content: &str) -> Result<(), 
String> { - let parent = file_path.parent().ok_or_else(|| "Invalid file path".to_string())?; - fs::create_dir_all(parent).map_err(|e| format!("Failed to create directory {}: {}", parent.display(), e))?; - fs::write(file_path, content).map_err(|e| format!("Failed to write file {}: {}", file_path.display(), e)) + let parent = file_path + .parent() + .ok_or_else(|| "Invalid file path".to_string())?; + fs::create_dir_all(parent) + .map_err(|e| format!("Failed to create directory {}: {}", parent.display(), e))?; + fs::write(file_path, content) + .map_err(|e| format!("Failed to write file {}: {}", file_path.display(), e)) } /// Create a new agent file under the user's home directory for the given CLI @@ -50,15 +54,26 @@ impl SubAgentService { // Prefer hidden directory: ~/.{cli}/agents let base_hidden = format!("~/.{}/agents", cli_name); let target_dir = Self::expand_tilde(&base_hidden)?; - fs::create_dir_all(&target_dir) - .map_err(|e| format!("Failed to create agents directory {}: {}", target_dir.display(), e))?; + fs::create_dir_all(&target_dir).map_err(|e| { + format!( + "Failed to create agents directory {}: {}", + target_dir.display(), + e + ) + })?; let file_path = target_dir.join(format!("{}.md", slug)); let mut frontmatter = String::from("---\n"); frontmatter.push_str(&format!("name: {}\n", name)); - if let Some(d) = description.as_ref() { frontmatter.push_str(&format!("description: {}\n", d)); } - if let Some(c) = color.as_ref() { frontmatter.push_str(&format!("color: {}\n", c)); } - if let Some(m) = model.as_ref() { frontmatter.push_str(&format!("model: {}\n", m)); } + if let Some(d) = description.as_ref() { + frontmatter.push_str(&format!("description: {}\n", d)); + } + if let Some(c) = color.as_ref() { + frontmatter.push_str(&format!("color: {}\n", c)); + } + if let Some(m) = model.as_ref() { + frontmatter.push_str(&format!("model: {}\n", m)); + } frontmatter.push_str("---\n"); let full = format!("{}{}", frontmatter, content); @@ -90,7 +105,7 @@ impl 
SubAgentService { format!("~/.{}/agents", cli_name), format!("~/{}/agents", cli_name), ]; - + let mut agents = Vec::new(); for path_str in paths { let expanded_path = Self::expand_tilde(&path_str)?; @@ -98,25 +113,25 @@ impl SubAgentService { agents.extend(found_agents); } } - + Ok(agents) } - + /// Load agents from a specific directory async fn load_agents_from_directory(dir_path: &Path) -> Result, String> { if !dir_path.exists() { return Ok(Vec::new()); } - + let mut agents = Vec::new(); - - let entries = fs::read_dir(dir_path) - .map_err(|e| format!("Failed to read directory: {}", e))?; - + + let entries = + fs::read_dir(dir_path).map_err(|e| format!("Failed to read directory: {}", e))?; + for entry in entries { let entry = entry.map_err(|e| format!("Failed to read directory entry: {}", e))?; let path = entry.path(); - + // Only process .md files if path.extension().and_then(|s| s.to_str()) == Some("md") { if let Ok(agent) = Self::parse_agent_file(&path).await { @@ -124,18 +139,18 @@ impl SubAgentService { } } } - + Ok(agents) } - + /// Parse a single agent markdown file async fn parse_agent_file(file_path: &Path) -> Result { let content = fs::read_to_string(file_path) .map_err(|e| format!("Failed to read file {}: {}", file_path.display(), e))?; - + // Parse the frontmatter and content let (metadata, agent_content) = Self::parse_frontmatter(&content)?; - + Ok(SubAgent { name: metadata.name, description: metadata.description, @@ -145,15 +160,15 @@ impl SubAgentService { file_path: file_path.to_string_lossy().to_string(), }) } - + /// Parse frontmatter from markdown content fn parse_frontmatter(content: &str) -> Result<(SubAgentMetadata, String), String> { let lines: Vec<&str> = content.lines().collect(); - + // Find the frontmatter boundaries let mut start_idx = None; let mut end_idx = None; - + for (i, line) in lines.iter().enumerate() { if line.trim() == "---" { if start_idx.is_none() { @@ -164,12 +179,12 @@ impl SubAgentService { } } } - + let (start_idx, 
end_idx) = match (start_idx, end_idx) { (Some(s), Some(e)) if s < e => (s, e), _ => return Err("Invalid frontmatter format".to_string()), }; - + // Parse the frontmatter let mut metadata = SubAgentMetadata { name: String::new(), @@ -177,7 +192,7 @@ impl SubAgentService { color: None, model: None, }; - + for i in (start_idx + 1)..end_idx { let line = lines[i]; if let Some((key, value)) = Self::parse_yaml_line(line) { @@ -190,14 +205,14 @@ impl SubAgentService { } } } - + // Get the content after frontmatter let content_lines = &lines[(end_idx + 1)..]; let agent_content = content_lines.join("\n").trim().to_string(); - + Ok((metadata, agent_content)) } - + /// Parse a single YAML line from frontmatter fn parse_yaml_line(line: &str) -> Option<(String, String)> { let parts: Vec<&str> = line.splitn(2, ':').collect(); @@ -209,12 +224,12 @@ impl SubAgentService { None } } - + /// Expand tilde in path to user's home directory fn expand_tilde(path: &str) -> Result { if path.starts_with("~") { - let home = home::home_dir() - .ok_or_else(|| "Failed to get home directory".to_string())?; + let home = + home::home_dir().ok_or_else(|| "Failed to get home directory".to_string())?; let path_without_tilde = &path[1..]; let path_without_tilde = path_without_tilde.trim_start_matches('/'); Ok(home.join(path_without_tilde)) @@ -226,7 +241,7 @@ impl SubAgentService { /// Get agents grouped by their CLI tool pub async fn get_agents_by_cli() -> Result>, String> { let mut grouped_agents: HashMap> = HashMap::new(); - + // Load agents for each CLI tool for cli in &["claude", "codex", "gemini"] { let agents = Self::load_agents_for_cli(cli).await?; @@ -234,7 +249,7 @@ impl SubAgentService { grouped_agents.insert(cli.to_string(), agents); } } - + Ok(grouped_agents) } @@ -247,7 +262,9 @@ impl SubAgentService { // Only allow deleting files under ~/./agents or ~//agents let home = home::home_dir().ok_or_else(|| "Failed to get home directory".to_string())?; - let normalized = 
p.canonicalize().map_err(|e| format!("Failed to resolve path: {}", e))?; + let normalized = p + .canonicalize() + .map_err(|e| format!("Failed to resolve path: {}", e))?; if !normalized.starts_with(&home) { return Err("Refusing to delete file outside home directory".to_string()); @@ -258,6 +275,7 @@ impl SubAgentService { return Err("Refusing to delete file outside agents directory".to_string()); } - fs::remove_file(&normalized).map_err(|e| format!("Failed to delete file {}: {}", normalized.display(), e)) + fs::remove_file(&normalized) + .map_err(|e| format!("Failed to delete file {}: {}", normalized.display(), e)) } } diff --git a/src-tauri/src/tests/chat_history.rs b/src-tauri/src/tests/chat_history.rs index 20f8a33..3aea1b4 100644 --- a/src-tauri/src/tests/chat_history.rs +++ b/src-tauri/src/tests/chat_history.rs @@ -2,30 +2,35 @@ mod tests { use crate::models::chat_history::*; use crate::services::chat_history_service::{ - ensure_commander_directory, group_messages_into_sessions, - save_chat_session, load_chat_sessions, load_session_messages, - delete_chat_session, migrate_legacy_chat_data, - extract_file_mentions, + delete_chat_session, ensure_commander_directory, extract_file_mentions, + group_messages_into_sessions, load_chat_sessions, load_session_messages, + migrate_legacy_chat_data, save_chat_session, }; + use chrono::Utc; use std::fs; use std::path::PathBuf; use tempfile::TempDir; - use chrono::Utc; fn create_test_project_dir() -> TempDir { let temp_dir = TempDir::new().expect("Failed to create temp directory"); // Initialize as git repo let git_dir = temp_dir.path().join(".git"); fs::create_dir_all(&git_dir).expect("Failed to create .git directory"); - + // Create a basic git config to mark as valid repo let config_file = git_dir.join("config"); - fs::write(config_file, "[core]\nrepositoryformatversion = 0\n").expect("Failed to write git config"); - + fs::write(config_file, "[core]\nrepositoryformatversion = 0\n") + .expect("Failed to write git 
config"); + temp_dir } - fn create_test_message(role: &str, content: &str, agent: &str, timestamp_offset: i64) -> EnhancedChatMessage { + fn create_test_message( + role: &str, + content: &str, + agent: &str, + timestamp_offset: i64, + ) -> EnhancedChatMessage { let base_time = Utc::now().timestamp() - 3600; // 1 hour ago as base EnhancedChatMessage { id: uuid::Uuid::new_v4().to_string(), @@ -48,11 +53,20 @@ mod tests { let project_path = temp_dir.path().to_string_lossy().to_string(); let result = ensure_commander_directory(&project_path).await; - assert!(result.is_ok(), "Should create .commander directory successfully"); + assert!( + result.is_ok(), + "Should create .commander directory successfully" + ); let commander_dir = temp_dir.path().join(".commander").join("chat_history"); - assert!(commander_dir.exists(), ".commander/chat_history directory should exist"); - assert!(commander_dir.is_dir(), ".commander/chat_history should be a directory"); + assert!( + commander_dir.exists(), + ".commander/chat_history directory should exist" + ); + assert!( + commander_dir.is_dir(), + ".commander/chat_history should be a directory" + ); } #[tokio::test] @@ -64,14 +78,18 @@ mod tests { let messages = vec![ create_test_message("user", "First message", "claude", 0), create_test_message("assistant", "First response", "claude", 60), // 1 min later - create_test_message("user", "Second message", "claude", 120), // 2 min later + create_test_message("user", "Second message", "claude", 120), // 2 min later // 10 minute gap - should create new session create_test_message("user", "New session message", "claude", 720), // 12 min later create_test_message("assistant", "New session response", "claude", 780), ]; let sessions = group_messages_into_sessions(messages).await.unwrap(); - assert_eq!(sessions.len(), 2, "Should create 2 sessions due to time gap"); + assert_eq!( + sessions.len(), + 2, + "Should create 2 sessions due to time gap" + ); // First session should have 3 messages 
assert_eq!(sessions[0].message_count, 3); @@ -96,7 +114,11 @@ mod tests { ]; let sessions = group_messages_into_sessions(messages).await.unwrap(); - assert_eq!(sessions.len(), 2, "Should create 2 sessions for different agents"); + assert_eq!( + sessions.len(), + 2, + "Should create 2 sessions for different agents" + ); // Sessions should be for different agents let claude_session = sessions.iter().find(|s| s.agent == "claude").unwrap(); @@ -116,7 +138,9 @@ mod tests { create_test_message("assistant", "Test response", "claude", 60), ]; - let sessions = group_messages_into_sessions(messages.clone()).await.unwrap(); + let sessions = group_messages_into_sessions(messages.clone()) + .await + .unwrap(); let session = &sessions[0]; // Save the session @@ -125,7 +149,10 @@ mod tests { // Load the session back let loaded_messages = load_session_messages(&project_path, &session.id).await; - assert!(loaded_messages.is_ok(), "Should load session messages successfully"); + assert!( + loaded_messages.is_ok(), + "Should load session messages successfully" + ); let loaded = loaded_messages.unwrap(); assert_eq!(loaded.len(), 2, "Should load correct number of messages"); @@ -149,12 +176,20 @@ mod tests { ]; // Save first session - let sessions1 = group_messages_into_sessions(messages1.clone()).await.unwrap(); - save_chat_session(&project_path, &sessions1[0], &messages1).await.unwrap(); + let sessions1 = group_messages_into_sessions(messages1.clone()) + .await + .unwrap(); + save_chat_session(&project_path, &sessions1[0], &messages1) + .await + .unwrap(); // Save second session - let sessions2 = group_messages_into_sessions(messages2.clone()).await.unwrap(); - save_chat_session(&project_path, &sessions2[0], &messages2).await.unwrap(); + let sessions2 = group_messages_into_sessions(messages2.clone()) + .await + .unwrap(); + save_chat_session(&project_path, &sessions2[0], &messages2) + .await + .unwrap(); // Load sessions index let sessions_list = load_chat_sessions(&project_path, 
None, None).await; @@ -173,27 +208,35 @@ mod tests { let project_path = temp_dir.path().to_string_lossy().to_string(); // Create sessions for different agents - let claude_messages = vec![ - create_test_message("user", "Claude message", "claude", 0), - ]; + let claude_messages = vec![create_test_message("user", "Claude message", "claude", 0)]; - let codex_messages = vec![ - create_test_message("user", "Codex message", "codex", 600), - ]; + let codex_messages = vec![create_test_message("user", "Codex message", "codex", 600)]; // Save sessions - let claude_sessions = group_messages_into_sessions(claude_messages.clone()).await.unwrap(); - save_chat_session(&project_path, &claude_sessions[0], &claude_messages).await.unwrap(); - - let codex_sessions = group_messages_into_sessions(codex_messages.clone()).await.unwrap(); - save_chat_session(&project_path, &codex_sessions[0], &codex_messages).await.unwrap(); + let claude_sessions = group_messages_into_sessions(claude_messages.clone()) + .await + .unwrap(); + save_chat_session(&project_path, &claude_sessions[0], &claude_messages) + .await + .unwrap(); + + let codex_sessions = group_messages_into_sessions(codex_messages.clone()) + .await + .unwrap(); + save_chat_session(&project_path, &codex_sessions[0], &codex_messages) + .await + .unwrap(); // Filter by agent - let claude_only = load_chat_sessions(&project_path, None, Some("claude".to_string())).await.unwrap(); + let claude_only = load_chat_sessions(&project_path, None, Some("claude".to_string())) + .await + .unwrap(); assert_eq!(claude_only.len(), 1); assert_eq!(claude_only[0].agent, "claude"); - let codex_only = load_chat_sessions(&project_path, None, Some("codex".to_string())).await.unwrap(); + let codex_only = load_chat_sessions(&project_path, None, Some("codex".to_string())) + .await + .unwrap(); assert_eq!(codex_only.len(), 1); assert_eq!(codex_only[0].agent, "codex"); } @@ -205,15 +248,24 @@ mod tests { // Create multiple sessions for i in 0..5 { - let messages = 
vec![ - create_test_message("user", &format!("Message {}", i), "claude", i * 600), - ]; - let sessions = group_messages_into_sessions(messages.clone()).await.unwrap(); - save_chat_session(&project_path, &sessions[0], &messages).await.unwrap(); + let messages = vec![create_test_message( + "user", + &format!("Message {}", i), + "claude", + i * 600, + )]; + let sessions = group_messages_into_sessions(messages.clone()) + .await + .unwrap(); + save_chat_session(&project_path, &sessions[0], &messages) + .await + .unwrap(); } // Test limit - let limited = load_chat_sessions(&project_path, Some(3), None).await.unwrap(); + let limited = load_chat_sessions(&project_path, Some(3), None) + .await + .unwrap(); assert_eq!(limited.len(), 3, "Should respect limit parameter"); } @@ -222,15 +274,17 @@ mod tests { let temp_dir = create_test_project_dir(); let project_path = temp_dir.path().to_string_lossy().to_string(); - let messages = vec![ - create_test_message("user", "To be deleted", "claude", 0), - ]; + let messages = vec![create_test_message("user", "To be deleted", "claude", 0)]; - let sessions = group_messages_into_sessions(messages.clone()).await.unwrap(); + let sessions = group_messages_into_sessions(messages.clone()) + .await + .unwrap(); let session_id = sessions[0].id.clone(); // Save session - save_chat_session(&project_path, &sessions[0], &messages).await.unwrap(); + save_chat_session(&project_path, &sessions[0], &messages) + .await + .unwrap(); // Verify it exists let before_delete = load_chat_sessions(&project_path, None, None).await.unwrap(); @@ -246,7 +300,10 @@ mod tests { // Verify session file is also deleted let session_file_result = load_session_messages(&project_path, &session_id).await; - assert!(session_file_result.is_err(), "Session file should be deleted"); + assert!( + session_file_result.is_err(), + "Session file should be deleted" + ); } #[tokio::test] @@ -256,13 +313,19 @@ mod tests { let content_with_files = "Let me check the src/main.rs and 
tests/mod.rs files for you."; let mut message = create_test_message("user", content_with_files, "claude", 0); - + // Extract file mentions message.metadata.file_mentions = extract_file_mentions(content_with_files); assert_eq!(message.metadata.file_mentions.len(), 2); - assert!(message.metadata.file_mentions.contains(&"src/main.rs".to_string())); - assert!(message.metadata.file_mentions.contains(&"tests/mod.rs".to_string())); + assert!(message + .metadata + .file_mentions + .contains(&"src/main.rs".to_string())); + assert!(message + .metadata + .file_mentions + .contains(&"tests/mod.rs".to_string())); } #[tokio::test] @@ -270,14 +333,27 @@ mod tests { let temp_dir = create_test_project_dir(); let messages = vec![ - create_test_message("user", "Can you help me implement a sorting algorithm?", "claude", 0), - create_test_message("assistant", "I'll help you implement quicksort", "claude", 60), + create_test_message( + "user", + "Can you help me implement a sorting algorithm?", + "claude", + 0, + ), + create_test_message( + "assistant", + "I'll help you implement quicksort", + "claude", + 60, + ), ]; let sessions = group_messages_into_sessions(messages).await.unwrap(); let session = &sessions[0]; - assert_eq!(session.summary, "Can you help me implement a sorting algorithm?"); + assert_eq!( + session.summary, + "Can you help me implement a sorting algorithm?" 
+ ); } #[tokio::test] @@ -307,7 +383,11 @@ mod tests { // Verify migration created sessions let sessions = load_chat_sessions(&project_path, None, None).await.unwrap(); - assert_eq!(sessions.len(), 1, "Should create one session from legacy data"); + assert_eq!( + sessions.len(), + 1, + "Should create one session from legacy data" + ); assert_eq!(sessions[0].agent, "claude"); assert_eq!(sessions[0].message_count, 2); } @@ -317,16 +397,25 @@ mod tests { let temp_dir = create_test_project_dir(); let project_path = temp_dir.path().to_string_lossy().to_string(); - let messages = vec![ - create_test_message("user", "Cross platform test", "claude", 0), - ]; + let messages = vec![create_test_message( + "user", + "Cross platform test", + "claude", + 0, + )]; - let sessions = group_messages_into_sessions(messages.clone()).await.unwrap(); - save_chat_session(&project_path, &sessions[0], &messages).await.unwrap(); + let sessions = group_messages_into_sessions(messages.clone()) + .await + .unwrap(); + save_chat_session(&project_path, &sessions[0], &messages) + .await + .unwrap(); // Verify the files are created with correct paths - let commander_dir = PathBuf::from(&project_path).join(".commander").join("chat_history"); - + let commander_dir = PathBuf::from(&project_path) + .join(".commander") + .join("chat_history"); + let index_file = commander_dir.join("sessions_index.json"); assert!(index_file.exists(), "Index file should exist"); @@ -349,7 +438,10 @@ mod tests { assert!(result.is_err(), "Should fail for invalid path"); let load_result = load_chat_sessions(invalid_path, None, None).await; - assert!(load_result.is_err(), "Should fail to load from invalid path"); + assert!( + load_result.is_err(), + "Should fail to load from invalid path" + ); } #[tokio::test] @@ -368,13 +460,17 @@ mod tests { )); } - let sessions = group_messages_into_sessions(messages.clone()).await.unwrap(); + let sessions = group_messages_into_sessions(messages.clone()) + .await + .unwrap(); 
assert_eq!(sessions.len(), 1, "Should group into single session"); let save_result = save_chat_session(&project_path, &sessions[0], &messages).await; assert!(save_result.is_ok(), "Should handle large session"); - let loaded = load_session_messages(&project_path, &sessions[0].id).await.unwrap(); + let loaded = load_session_messages(&project_path, &sessions[0].id) + .await + .unwrap(); assert_eq!(loaded.len(), 100, "Should load all messages"); } -} \ No newline at end of file +} diff --git a/src-tauri/src/tests/commands/git_commands.rs b/src-tauri/src/tests/commands/git_commands.rs index 861efc3..541c729 100644 --- a/src-tauri/src/tests/commands/git_commands.rs +++ b/src-tauri/src/tests/commands/git_commands.rs @@ -1,34 +1,37 @@ #[cfg(test)] mod tests { - use crate::*; - use crate::tests::{create_test_git_project, create_test_regular_project}; use crate::commands::git_commands::is_valid_git_repository; + use crate::tests::{create_test_git_project, create_test_regular_project}; + use crate::*; use std::path::Path; #[tokio::test] async fn test_is_valid_git_repository_with_git_folder() { let (_temp_dir, project_path) = create_test_git_project("test-git-repo"); - + let result = is_valid_git_repository(&project_path); - + assert!(result, "Should detect valid git repository"); } - #[tokio::test] + #[tokio::test] async fn test_is_valid_git_repository_without_git_folder() { let (_temp_dir, project_path) = create_test_regular_project("test-regular-folder"); - + let result = is_valid_git_repository(&project_path); - - assert!(!result, "Should not detect git repository in regular folder"); + + assert!( + !result, + "Should not detect git repository in regular folder" + ); } #[tokio::test] async fn test_is_valid_git_repository_nonexistent_path() { let nonexistent_path = Path::new("/this/path/does/not/exist"); - + let result = is_valid_git_repository(nonexistent_path); - + assert!(!result, "Should return false for nonexistent path"); } @@ -36,9 +39,9 @@ mod tests { async fn 
test_validate_git_repository_command_valid_repo() { let (_temp_dir, project_path) = create_test_git_project("test-command-valid"); let path_str = project_path.to_string_lossy().to_string(); - + let result = validate_git_repository(path_str).await; - + assert!(result.is_ok(), "Command should succeed for valid git repo"); assert!(result.unwrap(), "Should return true for valid git repo"); } @@ -47,9 +50,9 @@ mod tests { async fn test_validate_git_repository_command_invalid_repo() { let (_temp_dir, project_path) = create_test_regular_project("test-command-invalid"); let path_str = project_path.to_string_lossy().to_string(); - + let result = validate_git_repository(path_str).await; - + assert!(result.is_ok(), "Command should not error for invalid repo"); assert!(!result.unwrap(), "Should return false for non-git folder"); } @@ -57,10 +60,13 @@ mod tests { #[tokio::test] async fn test_validate_git_repository_command_nonexistent_path() { let nonexistent_path = "/this/path/absolutely/does/not/exist".to_string(); - + let result = validate_git_repository(nonexistent_path).await; - - assert!(result.is_ok(), "Command should not error for nonexistent path"); + + assert!( + result.is_ok(), + "Command should not error for nonexistent path" + ); assert!(!result.unwrap(), "Should return false for nonexistent path"); } -} \ No newline at end of file +} diff --git a/src-tauri/src/tests/commands/project_commands.rs b/src-tauri/src/tests/commands/project_commands.rs index 7e48095..5a838e5 100644 --- a/src-tauri/src/tests/commands/project_commands.rs +++ b/src-tauri/src/tests/commands/project_commands.rs @@ -1,10 +1,10 @@ #[cfg(test)] mod tests { - use crate::*; - use crate::tests::{create_test_git_project}; use crate::commands::git_commands::is_valid_git_repository; - use tempfile::TempDir; + use crate::tests::create_test_git_project; + use crate::*; use serial_test::serial; + use tempfile::TempDir; // Helper function to create a mock app handle for testing // Note: This is a 
placeholder - we'll need to implement proper app mocking @@ -19,7 +19,7 @@ mod tests { async fn test_add_project_to_recent_valid_project() { let (_temp_dir, project_path) = create_test_git_project("test-recent-project"); let _path_str = project_path.to_string_lossy().to_string(); - + // This test will need a mock app handle // TODO: Implement when we have proper app mocking // let app = create_mock_app().await; @@ -32,11 +32,14 @@ mod tests { let temp_dir = TempDir::new().expect("Failed to create temp directory"); let projects_folder = temp_dir.path().to_string_lossy().to_string(); let project_name = "unique-project-name".to_string(); - + let result = check_project_name_conflict(projects_folder, project_name).await; - + assert!(result.is_ok(), "Command should succeed"); - assert!(!result.unwrap(), "Should return false for non-conflicting name"); + assert!( + !result.unwrap(), + "Should return false for non-conflicting name" + ); } #[tokio::test] @@ -44,13 +47,13 @@ mod tests { let temp_dir = TempDir::new().expect("Failed to create temp directory"); let projects_folder = temp_dir.path().to_string_lossy().to_string(); let project_name = "existing-project".to_string(); - + // Create the conflicting project directory let conflicting_path = temp_dir.path().join(&project_name); std::fs::create_dir_all(&conflicting_path).expect("Failed to create conflicting directory"); - + let result = check_project_name_conflict(projects_folder, project_name).await; - + assert!(result.is_ok(), "Command should succeed"); assert!(result.unwrap(), "Should return true for conflicting name"); } @@ -58,9 +61,9 @@ mod tests { #[test] fn test_is_valid_git_repository_helper() { let (_temp_dir, project_path) = create_test_git_project("test-helper-function"); - + let result = is_valid_git_repository(&project_path); - + assert!(result, "Helper function should detect valid git repository"); } @@ -69,10 +72,13 @@ mod tests { let temp_dir = TempDir::new().expect("Failed to create temp directory"); let 
regular_path = temp_dir.path().join("not-a-git-repo"); std::fs::create_dir_all(®ular_path).expect("Failed to create directory"); - + let result = is_valid_git_repository(®ular_path); - - assert!(!result, "Helper function should not detect git in regular folder"); + + assert!( + !result, + "Helper function should not detect git in regular folder" + ); } // Integration test placeholder for the full project creation workflow @@ -81,10 +87,10 @@ mod tests { async fn test_create_project_workflow_integration() { // TODO: Implement full integration test that: // 1. Creates a new project with git - // 2. Verifies it's added to recent projects + // 2. Verifies it's added to recent projects // 3. Verifies it can be listed in recent projects // 4. Cleans up properly - + // This will require proper app handle mocking } -} \ No newline at end of file +} diff --git a/src-tauri/src/tests/commands/session_command_args.rs b/src-tauri/src/tests/commands/session_command_args.rs index a175853..3d0fd60 100644 --- a/src-tauri/src/tests/commands/session_command_args.rs +++ b/src-tauri/src/tests/commands/session_command_args.rs @@ -1,11 +1,14 @@ -use crate::commands::session_commands::{terminate_session, send_quit_command_to_session}; +use crate::commands::session_commands::{send_quit_command_to_session, terminate_session}; #[tokio::test] async fn terminate_session_accepts_session_id_and_succeeds_when_missing() { // This ensures the command parameter is correctly named `session_id` and // that calling it with a non-existent session does not error (current design). 
let res = terminate_session("nonexistent-session".to_string()).await; - assert!(res.is_ok(), "terminate_session should succeed even if session is missing"); + assert!( + res.is_ok(), + "terminate_session should succeed even if session is missing" + ); } #[tokio::test] @@ -13,7 +16,14 @@ async fn send_quit_command_uses_session_id_and_errors_when_missing() { // This ensures the command parameter is correctly named `session_id` and // that the underlying implementation returns a clear error when not found. let res = send_quit_command_to_session("nonexistent-session".to_string()).await; - assert!(res.is_err(), "send_quit_command_to_session should error for missing session"); + assert!( + res.is_err(), + "send_quit_command_to_session should error for missing session" + ); let msg = res.unwrap_err(); - assert!(msg.contains("Session not found"), "Unexpected error message: {}", msg); + assert!( + msg.contains("Session not found"), + "Unexpected error message: {}", + msg + ); } diff --git a/src-tauri/src/tests/error_handling.rs b/src-tauri/src/tests/error_handling.rs index 977af20..f13c77b 100644 --- a/src-tauri/src/tests/error_handling.rs +++ b/src-tauri/src/tests/error_handling.rs @@ -7,16 +7,20 @@ mod tests { #[test] fn test_git_error_creation() { let error = CommanderError::git("clone", "/path/to/repo", "Repository not found"); - + match &error { - CommanderError::Git { operation, path, message } => { + CommanderError::Git { + operation, + path, + message, + } => { assert_eq!(operation, "clone"); assert_eq!(path, "/path/to/repo"); assert_eq!(message, "Repository not found"); } _ => panic!("Expected Git error"), } - + let user_msg = error.user_message(); assert!(user_msg.contains("clone")); assert!(user_msg.contains("/path/to/repo")); @@ -26,31 +30,42 @@ mod tests { #[test] fn test_project_error_creation() { let error = CommanderError::project("create", "MyProject", "Directory already exists"); - + let user_msg = error.user_message(); - assert_eq!(user_msg, "Project 
operation 'create' failed for 'MyProject': Directory already exists"); + assert_eq!( + user_msg, + "Project operation 'create' failed for 'MyProject': Directory already exists" + ); } #[test] fn test_llm_error_creation() { let error = CommanderError::llm("OpenRouter", "fetch_models", "API key required"); - + let user_msg = error.user_message(); - assert_eq!(user_msg, "OpenRouter operation 'fetch_models' failed: API key required"); + assert_eq!( + user_msg, + "OpenRouter operation 'fetch_models' failed: API key required" + ); } #[test] fn test_validation_error_creation() { - let error = CommanderError::validation("project_name", "invalid/name", "Project name cannot contain slashes"); - + let error = CommanderError::validation( + "project_name", + "invalid/name", + "Project name cannot contain slashes", + ); + let user_msg = error.user_message(); assert_eq!(user_msg, "Invalid value 'invalid/name' for field 'project_name': Project name cannot contain slashes"); } #[test] fn test_network_error_with_status_code() { - let error = CommanderError::network("https://api.example.com", Some(404), "Resource not found"); - + let error = + CommanderError::network("https://api.example.com", Some(404), "Resource not found"); + let user_msg = error.user_message(); assert!(user_msg.contains("404")); assert!(user_msg.contains("https://api.example.com")); @@ -58,8 +73,12 @@ mod tests { #[test] fn test_session_error_with_id() { - let error = CommanderError::session(Some("sess-123".to_string()), "terminate", "Session not found"); - + let error = CommanderError::session( + Some("sess-123".to_string()), + "terminate", + "Session not found", + ); + let user_msg = error.user_message(); assert!(user_msg.contains("sess-123")); assert!(user_msg.contains("terminate")); @@ -68,7 +87,7 @@ mod tests { #[test] fn test_session_error_without_id() { let error = CommanderError::session(None, "list", "No active sessions"); - + let user_msg = error.user_message(); assert!(!user_msg.contains("sess-")); 
assert!(user_msg.contains("list")); @@ -77,7 +96,7 @@ mod tests { #[test] fn test_command_error_with_exit_code() { let error = CommanderError::command("git status", Some(128), "Not a git repository"); - + let user_msg = error.user_message(); assert!(user_msg.contains("128")); assert!(user_msg.contains("git status")); @@ -87,7 +106,7 @@ mod tests { fn test_error_conversion_to_string() { let error = CommanderError::application("FileManager", "Failed to read directory"); let error_string: String = error.into(); - + assert_eq!(error_string, "FileManager: Failed to read directory"); } @@ -95,25 +114,31 @@ mod tests { fn test_error_display_trait() { let error = CommanderError::configuration("Settings", "Invalid JSON format"); let displayed = format!("{}", error); - - assert_eq!(displayed, "Configuration error in Settings: Invalid JSON format"); + + assert_eq!( + displayed, + "Configuration error in Settings: Invalid JSON format" + ); } #[test] fn test_commander_result_usage() { fn example_function(should_fail: bool) -> CommanderResult { if should_fail { - Err(CommanderError::application("TestFunction", "Simulated failure")) + Err(CommanderError::application( + "TestFunction", + "Simulated failure", + )) } else { Ok("Success".to_string()) } } - + // Test success case let result = example_function(false); assert!(result.is_ok()); assert_eq!(result.unwrap(), "Success"); - + // Test error case let result = example_function(true); assert!(result.is_err()); @@ -124,14 +149,14 @@ mod tests { #[test] fn test_technical_vs_user_messages() { let error = CommanderError::git("push", "/repo", "Authentication failed"); - + let user_msg = error.user_message(); let technical_msg = error.technical_message(); - + // User message should be readable assert!(user_msg.contains("Git operation")); assert!(!user_msg.contains("Git {")); - + // Technical message should contain debug info assert!(technical_msg.contains("Git {")); assert!(technical_msg.contains("operation:")); @@ -140,17 +165,22 @@ 
mod tests { #[test] fn test_error_serialization() { let error = CommanderError::project("delete", "TestProject", "Project is locked"); - + // Test that error can be serialized to JSON (important for Tauri) let json = serde_json::to_string(&error).expect("Error should be serializable"); assert!(json.contains("Project")); assert!(json.contains("delete")); assert!(json.contains("TestProject")); - + // Test that error can be deserialized back - let deserialized: CommanderError = serde_json::from_str(&json).expect("Error should be deserializable"); + let deserialized: CommanderError = + serde_json::from_str(&json).expect("Error should be deserializable"); match deserialized { - CommanderError::Project { operation, project_name, message } => { + CommanderError::Project { + operation, + project_name, + message, + } => { assert_eq!(operation, "delete"); assert_eq!(project_name, "TestProject"); assert_eq!(message, "Project is locked"); @@ -158,4 +188,4 @@ mod tests { _ => panic!("Deserialized error should be Project variant"), } } -} \ No newline at end of file +} diff --git a/src-tauri/src/tests/integration/mod.rs b/src-tauri/src/tests/integration/mod.rs index 4c4d300..9e8cede 100644 --- a/src-tauri/src/tests/integration/mod.rs +++ b/src-tauri/src/tests/integration/mod.rs @@ -1,3 +1,3 @@ // Integration tests -pub mod store_persistence; pub mod new_project_persistence; +pub mod store_persistence; diff --git a/src-tauri/src/tests/integration/new_project_persistence.rs b/src-tauri/src/tests/integration/new_project_persistence.rs index d0bc6cd..828a60b 100644 --- a/src-tauri/src/tests/integration/new_project_persistence.rs +++ b/src-tauri/src/tests/integration/new_project_persistence.rs @@ -3,14 +3,12 @@ #[cfg(all(test, not(target_os = "macos")))] mod tests { use crate::commands::project_commands::{ - create_new_project_with_git, - open_existing_project, - list_recent_projects, + create_new_project_with_git, list_recent_projects, open_existing_project, }; use 
crate::tests::create_test_git_project; use serial_test::serial; - use tempfile::TempDir; use std::path::PathBuf; + use tempfile::TempDir; fn build_test_app() -> (tauri::App, TempDir) { // Isolate plugin-store path by overriding HOME to a temp dir @@ -35,8 +33,11 @@ mod tests { // Seed existing recent with a real git repo let (_seed_td, seed_path) = create_test_git_project("seed-repo"); let seed_path_str = seed_path.to_string_lossy().to_string(); - let _ = tauri::async_runtime::block_on(open_existing_project(handle.clone(), seed_path_str.clone())) - .expect("seed open should succeed"); + let _ = tauri::async_runtime::block_on(open_existing_project( + handle.clone(), + seed_path_str.clone(), + )) + .expect("seed open should succeed"); // Verify it is listed let recents_before = tauri::async_runtime::block_on(list_recent_projects(handle.clone())) @@ -60,7 +61,11 @@ mod tests { let recents_after = tauri::async_runtime::block_on(list_recent_projects(handle.clone())) .expect("list after should succeed"); - assert_eq!(recents_after.len(), 2, "Should keep existing and add the new project"); + assert_eq!( + recents_after.len(), + 2, + "Should keep existing and add the new project" + ); assert!(recents_after.iter().any(|p| p.path == seed_path_str)); assert!(recents_after.iter().any(|p| p.path == new_path)); } diff --git a/src-tauri/src/tests/integration/store_persistence.rs b/src-tauri/src/tests/integration/store_persistence.rs index bb555b1..6aa1231 100644 --- a/src-tauri/src/tests/integration/store_persistence.rs +++ b/src-tauri/src/tests/integration/store_persistence.rs @@ -1,7 +1,7 @@ #[cfg(all(test, not(target_os = "macos")))] mod tests { - use crate::commands::project_commands::{open_existing_project, list_recent_projects}; - use crate::tests::{create_test_git_project}; + use crate::commands::project_commands::{list_recent_projects, open_existing_project}; + use crate::tests::create_test_git_project; use serial_test::serial; use tempfile::TempDir; @@ -29,8 +29,9 @@ mod 
tests { let handle = app.handle(); // First open - let rp1 = tauri::async_runtime::block_on(open_existing_project(handle.clone(), path_str.clone())) - .expect("open should succeed"); + let rp1 = + tauri::async_runtime::block_on(open_existing_project(handle.clone(), path_str.clone())) + .expect("open should succeed"); assert_eq!(rp1.path, path_str); assert!(rp1.is_git_repo); @@ -41,8 +42,9 @@ mod tests { assert_eq!(recents1[0].path, path_str); // Reopen same path should dedup and keep len=1 - let rp2 = tauri::async_runtime::block_on(open_existing_project(handle.clone(), path_str.clone())) - .expect("reopen should succeed"); + let rp2 = + tauri::async_runtime::block_on(open_existing_project(handle.clone(), path_str.clone())) + .expect("reopen should succeed"); assert_eq!(rp2.path, path_str); let recents2 = tauri::async_runtime::block_on(list_recent_projects(handle.clone())) diff --git a/src-tauri/src/tests/mod.rs b/src-tauri/src/tests/mod.rs index fb59bd8..fc9a490 100644 --- a/src-tauri/src/tests/mod.rs +++ b/src-tauri/src/tests/mod.rs @@ -1,28 +1,28 @@ // Test module declarations +pub mod chat_history; pub mod commands; -pub mod services; -pub mod integration; pub mod error_handling; -pub mod chat_history; +pub mod integration; +pub mod services; // Common test utilities and helpers -use tempfile::TempDir; use std::path::PathBuf; +use tempfile::TempDir; /// Create a temporary test project with git repository pub fn create_test_git_project(name: &str) -> (TempDir, PathBuf) { let temp_dir = TempDir::new().expect("Failed to create temp directory"); let project_path = temp_dir.path().join(name); - + std::fs::create_dir_all(&project_path).expect("Failed to create project directory"); - + // Initialize git repository std::process::Command::new("git") .args(&["init"]) .current_dir(&project_path) .output() .expect("Failed to initialize git repository"); - + (temp_dir, project_path) } @@ -30,8 +30,8 @@ pub fn create_test_git_project(name: &str) -> (TempDir, PathBuf) { pub fn 
create_test_regular_project(name: &str) -> (TempDir, PathBuf) { let temp_dir = TempDir::new().expect("Failed to create temp directory"); let project_path = temp_dir.path().join(name); - + std::fs::create_dir_all(&project_path).expect("Failed to create project directory"); - + (temp_dir, project_path) -} \ No newline at end of file +} diff --git a/src-tauri/src/tests/services/agent_status_service.rs b/src-tauri/src/tests/services/agent_status_service.rs new file mode 100644 index 0000000..5aa275d --- /dev/null +++ b/src-tauri/src/tests/services/agent_status_service.rs @@ -0,0 +1,285 @@ +#[cfg(test)] +mod tests { + use std::collections::HashMap; + use std::sync::{Arc, Mutex}; + + use async_trait::async_trait; + + use crate::models::ai_agent::AIAgent; + use crate::services::agent_status_service::{AgentProbe, AgentStatusService}; + + fn all_enabled() -> HashMap { + HashMap::from([ + ("claude".to_string(), true), + ("codex".to_string(), true), + ("gemini".to_string(), true), + ]) + } + + #[derive(Clone, Debug)] + struct FakeCommandInfo { + present: bool, + version: Result, String>, + } + + #[derive(Clone, Debug)] + struct FakeProbe { + commands: HashMap, + latest_packages: HashMap, String>>, + installed_packages: HashMap, String>>, + version_calls: Arc>>, + } + + impl FakeProbe { + fn new() -> Self { + Self { + commands: HashMap::new(), + latest_packages: HashMap::new(), + installed_packages: HashMap::new(), + version_calls: Arc::new(Mutex::new(HashMap::new())), + } + } + + fn with_command( + mut self, + command: &str, + present: bool, + version: Result, String>, + ) -> Self { + self.commands + .insert(command.to_string(), FakeCommandInfo { present, version }); + self + } + + fn with_package(mut self, package: &str, latest: Result, String>) -> Self { + self.latest_packages.insert(package.to_string(), latest); + self + } + + fn with_installed_package( + mut self, + package: &str, + version: Result, String>, + ) -> Self { + 
self.installed_packages.insert(package.to_string(), version); + self + } + + fn record_version_call(&self, command: &str) { + let mut calls = self.version_calls.lock().unwrap(); + *calls.entry(command.to_string()).or_insert(0) += 1; + } + + fn version_call_count(&self, command: &str) -> usize { + *self + .version_calls + .lock() + .unwrap() + .get(command) + .unwrap_or(&0) + } + } + + #[async_trait] + impl AgentProbe for FakeProbe { + async fn locate(&self, command: &str) -> Result { + let info = self + .commands + .get(command) + .unwrap_or_else(|| panic!("unexpected locate call for {command}")); + Ok(info.present) + } + + async fn command_version(&self, command: &str) -> Result, String> { + self.record_version_call(command); + let info = self + .commands + .get(command) + .unwrap_or_else(|| panic!("unexpected version call for {command}")); + info.version.clone() + } + + async fn latest_package_version(&self, package: &str) -> Result, String> { + self.latest_packages + .get(package) + .unwrap_or_else(|| panic!("unexpected package call for {package}")) + .clone() + } + + async fn installed_package_version(&self, package: &str) -> Result, String> { + self.installed_packages + .get(package) + .unwrap_or_else(|| panic!("unexpected package call for {package}")) + .clone() + } + } + + fn find_agent<'a>(agents: &'a [AIAgent], name: &str) -> &'a AIAgent { + agents + .iter() + .find(|a| a.name == name) + .expect("agent missing") + } + + #[tokio::test] + async fn agents_report_versions_without_false_upgrade_flag() { + let probe = FakeProbe::new() + .with_command("claude", true, Ok(Some("2.0.5 (Claude Code)".to_string()))) + .with_command("codex", true, Ok(Some("codex-cli 0.41.0".to_string()))) + .with_command("gemini", true, Ok(Some("0.6.1".to_string()))) + .with_package("@anthropic-ai/claude-code", Ok(Some("2.0.5".to_string()))) + .with_installed_package("@anthropic-ai/claude-code", Ok(Some("2.0.5".to_string()))) + .with_package("@openai/codex", 
Ok(Some("0.42.0".to_string()))) + .with_installed_package("@openai/codex", Ok(Some("0.42.0".to_string()))) + .with_package("@google/gemini-cli", Ok(None)); + let probe = + probe.with_installed_package("@google/gemini-cli", Ok(Some("0.6.1".to_string()))); + + let service = AgentStatusService::with_probe(probe.clone()); + let status = service + .check_agents(&all_enabled()) + .await + .expect("status ok"); + + let claude = find_agent(&status.agents, "claude"); + assert!(claude.available, "claude should be marked available"); + assert_eq!( + claude.installed_version.as_deref(), + Some("2.0.5 (Claude Code)") + ); + assert_eq!(claude.latest_version.as_deref(), Some("2.0.5")); + assert!( + !claude.upgrade_available, + "claude should not report upgrade when semver matches" + ); + + let codex = find_agent(&status.agents, "codex"); + assert!(codex.available, "codex should be available"); + assert_eq!( + codex.installed_version.as_deref(), + Some("0.42.0 (CLI reports codex-cli 0.41.0)") + ); + assert_eq!(codex.latest_version.as_deref(), Some("0.42.0")); + assert!(!codex.upgrade_available, "codex should be up to date"); + + let gemini = find_agent(&status.agents, "gemini"); + assert!( + gemini.available, + "gemini should still be considered available" + ); + assert_eq!(gemini.installed_version.as_deref(), Some("0.6.1")); + assert!( + gemini.latest_version.is_none(), + "gemini latest should be unknown without npm data" + ); + assert!( + !gemini.upgrade_available, + "gemini should not require upgrade without comparison" + ); + } + + #[tokio::test] + async fn newer_latest_version_triggers_upgrade_flag() { + let probe = FakeProbe::new() + .with_command("claude", true, Ok(Some("1.0.0".to_string()))) + .with_command("codex", true, Ok(Some("codex-cli 0.40.0".to_string()))) + .with_command("gemini", true, Ok(Some("0.6.0".to_string()))) + .with_package("@anthropic-ai/claude-code", Ok(Some("1.1.0".to_string()))) + .with_installed_package("@anthropic-ai/claude-code", 
Ok(Some("1.0.0".to_string()))) + .with_package("@openai/codex", Ok(Some("0.41.0".to_string()))) + .with_installed_package("@openai/codex", Ok(Some("0.40.0".to_string()))) + .with_package("@google/gemini-cli", Ok(Some("0.7.0".to_string()))); + let probe = + probe.with_installed_package("@google/gemini-cli", Ok(Some("0.6.0".to_string()))); + + let service = AgentStatusService::with_probe(probe); + let status = service + .check_agents(&all_enabled()) + .await + .expect("status ok"); + + let claude = find_agent(&status.agents, "claude"); + assert!( + claude.upgrade_available, + "claude should request upgrade when npm newer" + ); + + let codex = find_agent(&status.agents, "codex"); + assert!( + codex.upgrade_available, + "codex should request upgrade when npm newer" + ); + + let gemini = find_agent(&status.agents, "gemini"); + assert!( + gemini.upgrade_available, + "gemini should request upgrade when npm newer" + ); + } + + #[tokio::test] + async fn missing_agent_surfaces_error_message() { + let probe = FakeProbe::new() + .with_command("claude", true, Ok(Some("1.0.0".to_string()))) + .with_command("codex", false, Err("command failed".to_string())) + .with_command("gemini", true, Ok(Some("0.8.0".to_string()))) + .with_package("@anthropic-ai/claude-code", Ok(None)) + .with_installed_package("@anthropic-ai/claude-code", Ok(None)) + .with_package("@openai/codex", Ok(Some("1.5.0".to_string()))) + .with_installed_package("@openai/codex", Ok(Some("1.5.0".to_string()))) + .with_package("@google/gemini-cli", Ok(None)); + let probe = + probe.with_installed_package("@google/gemini-cli", Ok(Some("0.8.0".to_string()))); + + let service = AgentStatusService::with_probe(probe); + let status = service + .check_agents(&all_enabled()) + .await + .expect("status ok"); + + let codex = find_agent(&status.agents, "codex"); + assert!( + !codex.available, + "codex should be unavailable when command missing" + ); + let message = codex + .error_message + .as_deref() + .expect("error message 
present"); + assert!( + message.contains("not found") || message.contains("command failed"), + "unexpected error message: {message}" + ); + assert!(codex.installed_version.is_none()); + assert!( + codex.upgrade_available, + "missing agent should be treated as needing upgrade" + ); + } + + #[tokio::test] + async fn disabled_agents_are_not_probed() { + let mut enabled = all_enabled(); + enabled.insert("codex".to_string(), false); + + let probe = FakeProbe::new() + .with_command("claude", true, Ok(Some("1.0.0".to_string()))) + .with_command("gemini", true, Ok(Some("0.9.0".to_string()))) + .with_package("@anthropic-ai/claude-code", Ok(None)) + .with_installed_package("@anthropic-ai/claude-code", Ok(Some("1.0.0".to_string()))) + .with_package("@google/gemini-cli", Ok(None)) + .with_installed_package("@google/gemini-cli", Ok(Some("0.9.0".to_string()))); + + let service = AgentStatusService::with_probe(probe.clone()); + let status = service.check_agents(&enabled).await.expect("status ok"); + + let codex = find_agent(&status.agents, "codex"); + assert!(!codex.enabled, "codex should be marked disabled"); + assert!(!codex.available, "disabled agent should not be available"); + assert_eq!( + probe.version_call_count("codex"), + 0, + "disabled agent should not trigger version probe" + ); + } +} diff --git a/src-tauri/src/tests/services/cli_command_builder.rs b/src-tauri/src/tests/services/cli_command_builder.rs new file mode 100644 index 0000000..e9ba0b7 --- /dev/null +++ b/src-tauri/src/tests/services/cli_command_builder.rs @@ -0,0 +1,55 @@ +use crate::models::ai_agent::AgentSettings; +use crate::services::cli_command_builder::build_codex_command_args; +use crate::services::execution_mode_service::ExecutionMode; + +#[test] +fn codex_args_include_exec_and_prompt() { + let settings = AgentSettings { + model: None, + ..Default::default() + }; + + let args = build_codex_command_args("how are you?", None, false, Some(&settings)); + + assert_eq!(args.first().map(String::as_str), 
Some("exec")); + assert!(args.contains(&"how are you?".to_string()), "prompt should be included in args"); + assert!(args.contains(&"--skip-git-repo-check".to_string())); +} + +#[test] +fn codex_args_include_model_flag_when_configured() { + let settings = AgentSettings { + model: Some("o3".to_string()), + ..Default::default() + }; + + let args = build_codex_command_args("generate", None, false, Some(&settings)); + + assert!(args.windows(2).any(|pair| pair == ["--model", "o3"])); +} + +#[test] +fn codex_args_include_execution_mode_flags() { + let settings = AgentSettings::default(); + + let args = build_codex_command_args( + "do something", + Some(ExecutionMode::Collab), + false, + Some(&settings), + ); + + assert!(args.contains(&"--sandbox".to_string())); + assert!(args.contains(&"workspace-write".to_string())); + assert!(args.contains(&"--skip-git-repo-check".to_string())); +} + +#[test] +fn codex_args_include_unsafe_full_toggle() { + let settings = AgentSettings::default(); + + let args = + build_codex_command_args("run full", Some(ExecutionMode::Full), true, Some(&settings)); + + assert!(args.contains(&"--dangerously-bypass-approvals-and-sandbox".to_string())); +} diff --git a/src-tauri/src/tests/services/cli_output_service.rs b/src-tauri/src/tests/services/cli_output_service.rs new file mode 100644 index 0000000..96aaa95 --- /dev/null +++ b/src-tauri/src/tests/services/cli_output_service.rs @@ -0,0 +1,89 @@ +use crate::services::cli_output_service::{sanitize_cli_output_line, CodexStreamAccumulator}; + +#[test] +fn filters_node_circular_dependency_warnings_for_codex() { + let warning = "(node:47953) Warning: Accessing non-existent property 'lineno' of module exports inside circular dependency"; + assert!(sanitize_cli_output_line("codex", warning).is_none()); + + let filename_warning = "(node:47953) Warning: Accessing non-existent property 'filename' of module exports inside circular dependency"; + assert!(sanitize_cli_output_line("codex", 
filename_warning).is_none()); +} + +#[test] +fn filters_trace_warnings_hint_for_codex() { + let hint = "(Use `node --trace-warnings ...` to show where the warning was created)"; + assert!(sanitize_cli_output_line("codex", hint).is_none()); +} + +#[test] +fn keeps_legitimate_error_output() { + let err_line = "npm ERR! missing script: start"; + assert_eq!( + sanitize_cli_output_line("codex", err_line), + Some(err_line.to_string()) + ); +} + +#[test] +fn leaves_other_agents_output_untouched() { + let warning = "(node:47953) Warning: Accessing non-existent property 'lineno' of module exports inside circular dependency"; + assert_eq!( + sanitize_cli_output_line("claude", warning), + Some(warning.to_string()) + ); +} + +#[test] +fn codex_stream_accumulator_emits_on_carriage_return() { + let mut acc = CodexStreamAccumulator::new(); + + let chunks = acc.push_chunk("{\"type\":\"item.started\"}\r"); + assert_eq!(chunks, vec!["{\"type\":\"item.started\"}".to_string()]); + + let chunks = acc.push_chunk("{\"type\":\"item.completed\"}\r\n"); + assert_eq!(chunks, vec!["{\"type\":\"item.completed\"}".to_string()]); +} + +#[test] +fn codex_stream_accumulator_buffers_partial_chunks() { + let mut acc = CodexStreamAccumulator::new(); + + let chunks = acc.push_chunk("{\"type\":\"item"); + assert!(chunks.is_empty(), "incomplete JSON should be buffered"); + + let chunks = acc.push_chunk(".started\"}\r"); + assert_eq!(chunks, vec!["{\"type\":\"item.started\"}".to_string()]); + + assert!(acc.flush().is_none(), "buffer should be empty after flush"); +} + +#[test] +fn codex_stream_accumulator_flushes_trailing_content() { + let mut acc = CodexStreamAccumulator::new(); + + acc.push_chunk("{\"type\":\"item.started\"}"); + assert_eq!(acc.flush(), Some("{\"type\":\"item.started\"}".to_string())); +} + +#[test] +fn codex_stream_accumulator_strips_sse_envelope() { + let mut acc = CodexStreamAccumulator::new(); + + // event/id lines should be ignored entirely + let chunks = acc.push_chunk("event: 
thread.started\r\n"); + assert!(chunks.is_empty()); + + let payload = "data: {\"type\":\"item.completed\",\"item\":{\"type\":\"agent_message\",\"text\":\"hello\"}}\r\n"; + let chunks = acc.push_chunk(payload); + assert_eq!( + chunks, + vec![ + "{\"type\":\"item.completed\",\"item\":{\"type\":\"agent_message\",\"text\":\"hello\"}}" + .to_string() + ] + ); + + // [DONE] should be swallowed without producing output + let chunks = acc.push_chunk("data: [DONE]\r\n"); + assert!(chunks.is_empty()); +} diff --git a/src-tauri/src/tests/services/codex_sdk_service.rs b/src-tauri/src/tests/services/codex_sdk_service.rs new file mode 100644 index 0000000..daa7553 --- /dev/null +++ b/src-tauri/src/tests/services/codex_sdk_service.rs @@ -0,0 +1,33 @@ +use crate::services::codex_sdk_service::build_codex_thread_prefs; +use crate::services::execution_mode_service::ExecutionMode; + +#[test] +fn chat_mode_maps_to_read_only_sandbox() { + let prefs = build_codex_thread_prefs(Some(ExecutionMode::Chat), false); + assert_eq!(prefs.sandbox_mode.as_deref(), Some("read-only")); + assert!(prefs.skip_git_repo_check); +} + +#[test] +fn collab_mode_maps_to_workspace_write() { + let prefs = build_codex_thread_prefs(Some(ExecutionMode::Collab), false); + assert_eq!(prefs.sandbox_mode.as_deref(), Some("workspace-write")); +} + +#[test] +fn full_mode_uses_workspace_write_by_default() { + let prefs = build_codex_thread_prefs(Some(ExecutionMode::Full), false); + assert_eq!(prefs.sandbox_mode.as_deref(), Some("workspace-write")); +} + +#[test] +fn full_mode_with_bypass_disables_sandbox() { + let prefs = build_codex_thread_prefs(Some(ExecutionMode::Full), true); + assert_eq!(prefs.sandbox_mode.as_deref(), Some("danger-full-access")); +} + +#[test] +fn none_defaults_to_workspace_write() { + let prefs = build_codex_thread_prefs(None, false); + assert_eq!(prefs.sandbox_mode.as_deref(), Some("workspace-write")); +} diff --git a/src-tauri/src/tests/services/execution_mode_service.rs 
b/src-tauri/src/tests/services/execution_mode_service.rs index 47e211b..372daf7 100644 --- a/src-tauri/src/tests/services/execution_mode_service.rs +++ b/src-tauri/src/tests/services/execution_mode_service.rs @@ -1,21 +1,17 @@ #[cfg(test)] mod tests { - use crate::services::execution_mode_service::{ExecutionMode, codex_flags_for_mode}; + use crate::services::execution_mode_service::{codex_flags_for_mode, ExecutionMode}; #[test] fn test_codex_flags_chat_mode() { let flags = codex_flags_for_mode(ExecutionMode::Chat, false); - assert_eq!(flags, vec![ - "--sandbox", "read-only", "--ask-for-approval", "never" - ]); + assert_eq!(flags, vec!["--sandbox", "read-only"]); } #[test] fn test_codex_flags_collab_mode() { let flags = codex_flags_for_mode(ExecutionMode::Collab, false); - assert_eq!(flags, vec![ - "--sandbox", "workspace-write", "--ask-for-approval", "on-request" - ]); + assert_eq!(flags, vec!["--sandbox", "workspace-write"]); } #[test] diff --git a/src-tauri/src/tests/services/file_service.rs b/src-tauri/src/tests/services/file_service.rs index 508a9c9..470394a 100644 --- a/src-tauri/src/tests/services/file_service.rs +++ b/src-tauri/src/tests/services/file_service.rs @@ -23,4 +23,3 @@ mod tests { assert!(err.contains("Failed to read file")); } } - diff --git a/src-tauri/src/tests/services/git_service_enhanced.rs b/src-tauri/src/tests/services/git_service_enhanced.rs index ade2c74..7ec4568 100644 --- a/src-tauri/src/tests/services/git_service_enhanced.rs +++ b/src-tauri/src/tests/services/git_service_enhanced.rs @@ -4,16 +4,16 @@ mod tests { use crate::tests::{create_test_git_project, create_test_regular_project}; use std::fs; use std::path::Path; - use tempfile::TempDir; use std::process::Command as StdCommand; + use tempfile::TempDir; #[tokio::test] async fn test_find_git_root_regular_repo() { let (_temp_dir, project_path) = create_test_git_project("test-git-root"); let path_str = project_path.to_string_lossy().to_string(); - + let result = 
git_service::find_git_root(&path_str); - + assert!(result.is_some(), "Should find git root for regular repo"); let root = result.unwrap(); assert_eq!(Path::new(&root), project_path); @@ -22,9 +22,9 @@ mod tests { #[tokio::test] async fn test_find_git_root_nonexistent_path() { let nonexistent_path = "/this/path/does/not/exist"; - + let result = git_service::find_git_root(nonexistent_path); - + assert!(result.is_none(), "Should return None for nonexistent path"); } @@ -32,9 +32,9 @@ mod tests { async fn test_find_git_root_non_git_directory() { let (_temp_dir, project_path) = create_test_regular_project("test-non-git"); let path_str = project_path.to_string_lossy().to_string(); - + let result = git_service::find_git_root(&path_str); - + assert!(result.is_none(), "Should return None for non-git directory"); } @@ -42,9 +42,9 @@ mod tests { async fn test_resolve_git_project_path_regular_repo() { let (_temp_dir, project_path) = create_test_git_project("test-resolve-regular"); let path_str = project_path.to_string_lossy().to_string(); - + let result = git_service::resolve_git_project_path(&path_str); - + assert!(result.is_some(), "Should resolve regular git repository"); assert_eq!(result.unwrap(), path_str); } @@ -53,9 +53,9 @@ mod tests { async fn test_resolve_git_project_path_non_git_directory() { let (_temp_dir, project_path) = create_test_regular_project("test-resolve-non-git"); let path_str = project_path.to_string_lossy().to_string(); - + let result = git_service::resolve_git_project_path(&path_str); - + assert!(result.is_none(), "Should return None for non-git directory"); } @@ -65,65 +65,111 @@ mod tests { let temp_dir = TempDir::new().unwrap(); let main_repo = temp_dir.path().join("main"); fs::create_dir_all(&main_repo).unwrap(); - + // Initialize git repo init_git_repo(&main_repo); - + // Create a worktree let worktree_path = temp_dir.path().join("worktree"); let output = StdCommand::new("git") .current_dir(&main_repo) - .args(["worktree", "add", "-b", 
"feature", worktree_path.to_str().unwrap()]) + .args([ + "worktree", + "add", + "-b", + "feature", + worktree_path.to_str().unwrap(), + ]) .output() .unwrap(); - + assert!(output.status.success(), "Failed to create worktree"); - + // Test resolve on the worktree let worktree_str = worktree_path.to_string_lossy().to_string(); let result = git_service::resolve_git_project_path(&worktree_str); - + // Should resolve to either the main repo or fallback to git command result - assert!(result.is_some(), "Should resolve worktree to main repository or itself"); - + assert!( + result.is_some(), + "Should resolve worktree to main repository or itself" + ); + // The result should be a valid git repository path let resolved_path = result.unwrap(); - assert!(Path::new(&resolved_path).exists(), "Resolved path should exist"); + assert!( + Path::new(&resolved_path).exists(), + "Resolved path should exist" + ); } fn init_git_repo(dir: &std::path::Path) { // git init - assert!(StdCommand::new("git").arg("init").current_dir(dir).status().unwrap().success()); + assert!(StdCommand::new("git") + .arg("init") + .current_dir(dir) + .status() + .unwrap() + .success()); // user config (local) - let _ = StdCommand::new("git").args(["config","user.name","Test"]).current_dir(dir).status(); - let _ = StdCommand::new("git").args(["config","user.email","test@example.com"]).current_dir(dir).status(); + let _ = StdCommand::new("git") + .args(["config", "user.name", "Test"]) + .current_dir(dir) + .status(); + let _ = StdCommand::new("git") + .args(["config", "user.email", "test@example.com"]) + .current_dir(dir) + .status(); // initial commit fs::write(dir.join("README.md"), "# test\n").unwrap(); - assert!(StdCommand::new("git").args(["add","."]).current_dir(dir).status().unwrap().success()); - assert!(StdCommand::new("git").args(["commit","-m","init"]).current_dir(dir).status().unwrap().success()); + assert!(StdCommand::new("git") + .args(["add", "."]) + .current_dir(dir) + .status() + .unwrap() + 
.success()); + assert!(StdCommand::new("git") + .args(["commit", "-m", "init"]) + .current_dir(dir) + .status() + .unwrap() + .success()); // ensure main branch - let _ = StdCommand::new("git").args(["branch","-M","main"]).current_dir(dir).status(); + let _ = StdCommand::new("git") + .args(["branch", "-M", "main"]) + .current_dir(dir) + .status(); } #[tokio::test] async fn test_resolve_git_project_path_subdir_of_repo() { let (_temp_dir, project_path) = create_test_git_project("test-resolve-subdir"); - + // Create a subdirectory let subdir = project_path.join("src").join("components"); fs::create_dir_all(&subdir).unwrap(); - + let subdir_str = subdir.to_string_lossy().to_string(); let result = git_service::resolve_git_project_path(&subdir_str); - + // Should return None because the subdirectory itself doesn't have .git - assert!(result.is_none(), "Subdirectory without .git should return None"); - + assert!( + result.is_none(), + "Subdirectory without .git should return None" + ); + // But find_git_root should work from the subdirectory let root_result = git_service::find_git_root(&subdir_str); - assert!(root_result.is_some(), "find_git_root should work from subdirectory"); - + assert!( + root_result.is_some(), + "find_git_root should work from subdirectory" + ); + let root = root_result.unwrap(); - assert_eq!(Path::new(&root), project_path, "Should find the main repository root"); + assert_eq!( + Path::new(&root), + project_path, + "Should find the main repository root" + ); } -} \ No newline at end of file +} diff --git a/src-tauri/src/tests/services/mod.rs b/src-tauri/src/tests/services/mod.rs index f5870dc..0921c53 100644 --- a/src-tauri/src/tests/services/mod.rs +++ b/src-tauri/src/tests/services/mod.rs @@ -1,6 +1,10 @@ // Service-specific tests -pub mod recent_projects; +pub mod agent_status_service; +pub mod cli_command_builder; +pub mod cli_output_service; +pub mod codex_sdk_service; +pub mod execution_mode_service; pub mod file_service; -pub mod 
prompt_service; pub mod git_service_enhanced; -pub mod execution_mode_service; +pub mod prompt_service; +pub mod recent_projects; diff --git a/src-tauri/src/tests/services/prompt_service.rs b/src-tauri/src/tests/services/prompt_service.rs index 132b326..2935bc0 100644 --- a/src-tauri/src/tests/services/prompt_service.rs +++ b/src-tauri/src/tests/services/prompt_service.rs @@ -1,35 +1,35 @@ use crate::models::*; use crate::services::prompt_service::*; -use std::collections::HashMap; use serial_test::serial; +use std::collections::HashMap; #[tokio::test] #[serial] async fn test_get_default_prompts() { let config = get_default_prompts(); - + // Test basic structure assert!(config.version > 0); assert!(!config.categories.is_empty()); assert!(!config.prompts.is_empty()); - + // Test specific categories exist assert!(config.categories.contains_key("plan_mode")); assert!(config.categories.contains_key("agent_execution")); assert!(config.categories.contains_key("code_analysis")); - + // Test category properties let plan_mode = config.categories.get("plan_mode").unwrap(); assert_eq!(plan_mode.name, "Plan Mode"); assert!(plan_mode.enabled); assert!(!plan_mode.description.is_empty()); - + // Test prompts structure assert!(config.prompts.contains_key("plan_mode")); let plan_prompts = config.prompts.get("plan_mode").unwrap(); assert!(plan_prompts.contains_key("system")); assert!(plan_prompts.contains_key("user_context")); - + // Test specific prompt properties let system_prompt = plan_prompts.get("system").unwrap(); assert_eq!(system_prompt.name, "Plan Generation System Prompt"); @@ -37,7 +37,7 @@ async fn test_get_default_prompts() { assert!(!system_prompt.content.is_empty()); assert!(system_prompt.created_at > 0); assert!(system_prompt.updated_at > 0); - + // Test user context prompt has variables let user_context = plan_prompts.get("user_context").unwrap(); assert!(!user_context.variables.is_empty()); @@ -57,11 +57,11 @@ async fn test_prompt_template_render() { created_at: 
chrono::Utc::now().timestamp(), updated_at: chrono::Utc::now().timestamp(), }; - + let mut variables = HashMap::new(); variables.insert("name".to_string(), "Alice".to_string()); variables.insert("project".to_string(), "Commander".to_string()); - + let rendered = prompt.render(&variables); assert_eq!(rendered, "Hello Alice, welcome to Commander!"); } @@ -72,13 +72,14 @@ async fn test_prompt_template_extract_variables() { let prompt = PromptTemplate { name: "Test Prompt".to_string(), description: "Test description".to_string(), - content: "User: {{user_request}}\nContext: {{context}}\nRepeat: {{user_request}}".to_string(), + content: "User: {{user_request}}\nContext: {{context}}\nRepeat: {{user_request}}" + .to_string(), category: "test".to_string(), variables: vec![], created_at: chrono::Utc::now().timestamp(), updated_at: chrono::Utc::now().timestamp(), }; - + let extracted = prompt.extract_variables(); assert_eq!(extracted.len(), 2); // Should not duplicate user_request assert!(extracted.contains(&"user_request".to_string())); @@ -97,19 +98,19 @@ async fn test_prompt_template_validate_variables() { created_at: chrono::Utc::now().timestamp(), updated_at: chrono::Utc::now().timestamp(), }; - + // Test successful validation let mut valid_variables = HashMap::new(); valid_variables.insert("name".to_string(), "Alice".to_string()); valid_variables.insert("role".to_string(), "admin".to_string()); - + assert!(prompt.validate_variables(&valid_variables).is_ok()); - + // Test missing variable let mut invalid_variables = HashMap::new(); invalid_variables.insert("name".to_string(), "Alice".to_string()); // Missing "role" - + let result = prompt.validate_variables(&invalid_variables); assert!(result.is_err()); let missing = result.unwrap_err(); @@ -121,16 +122,16 @@ async fn test_prompt_template_validate_variables() { #[serial] async fn test_prompts_config_get_prompt() { let config = get_default_prompts(); - + // Test successful retrieval let prompt = 
config.get_prompt("plan_mode", "system"); assert!(prompt.is_some()); assert_eq!(prompt.unwrap().name, "Plan Generation System Prompt"); - + // Test non-existent category let prompt = config.get_prompt("non_existent", "system"); assert!(prompt.is_none()); - + // Test non-existent prompt in existing category let prompt = config.get_prompt("plan_mode", "non_existent"); assert!(prompt.is_none()); @@ -140,12 +141,12 @@ async fn test_prompts_config_get_prompt() { #[serial] async fn test_prompts_config_get_category_prompts() { let config = get_default_prompts(); - + // Test successful retrieval let prompts = config.get_category_prompts("plan_mode"); assert!(prompts.is_some()); assert!(!prompts.unwrap().is_empty()); - + // Test non-existent category let prompts = config.get_category_prompts("non_existent"); assert!(prompts.is_none()); @@ -155,14 +156,14 @@ async fn test_prompts_config_get_category_prompts() { #[serial] async fn test_prompts_config_get_enabled_categories() { let mut config = get_default_prompts(); - + // All default categories should be enabled let enabled = config.get_enabled_categories(); assert_eq!(enabled.len(), 3); // plan_mode, agent_execution, code_analysis - + // Disable one category config.categories.get_mut("plan_mode").unwrap().enabled = false; - + let enabled = config.get_enabled_categories(); assert_eq!(enabled.len(), 2); assert!(!enabled.iter().any(|(key, _)| key == &"plan_mode")); @@ -173,7 +174,7 @@ async fn test_prompts_config_get_enabled_categories() { async fn test_prompts_config_add_remove_prompt() { let mut config = get_default_prompts(); let initial_count = config.get_category_prompts("plan_mode").unwrap().len(); - + // Test adding a new prompt let new_prompt = PromptTemplate { name: "Test Prompt".to_string(), @@ -184,26 +185,26 @@ async fn test_prompts_config_add_remove_prompt() { created_at: chrono::Utc::now().timestamp(), updated_at: chrono::Utc::now().timestamp(), }; - + config.add_prompt("plan_mode".to_string(), 
"test".to_string(), new_prompt); - + assert_eq!( config.get_category_prompts("plan_mode").unwrap().len(), initial_count + 1 ); assert!(config.get_prompt("plan_mode", "test").is_some()); - + // Test removing the prompt let removed = config.remove_prompt("plan_mode", "test"); assert!(removed.is_some()); assert_eq!(removed.unwrap().name, "Test Prompt"); - + assert_eq!( config.get_category_prompts("plan_mode").unwrap().len(), initial_count ); assert!(config.get_prompt("plan_mode", "test").is_none()); - + // Test removing non-existent prompt let removed = config.remove_prompt("plan_mode", "non_existent"); assert!(removed.is_none()); @@ -215,18 +216,18 @@ async fn test_prompts_config_add_remove_prompt() { #[serial] async fn test_load_and_save_prompts() { let (_temp_dir, app) = create_test_app().await; - + // Test loading default prompts (should return defaults when no store exists) let loaded = load_prompts(&app).await; assert!(loaded.is_ok()); let config = loaded.unwrap(); assert!(config.version > 0); assert!(!config.categories.is_empty()); - + // Test saving prompts let save_result = save_prompts(&app, &config).await; assert!(save_result.is_ok()); - + // Test loading saved prompts let loaded_again = load_prompts(&app).await; assert!(loaded_again.is_ok()); @@ -238,12 +239,12 @@ async fn test_load_and_save_prompts() { #[serial] async fn test_update_prompt() { let (_temp_dir, app) = create_test_app().await; - + // First save default prompts let config = get_default_prompts(); let save_result = save_prompts(&app, &config).await; assert!(save_result.is_ok()); - + // Update a prompt let updated_prompt = PromptTemplate { name: "Updated System Prompt".to_string(), @@ -254,10 +255,10 @@ async fn test_update_prompt() { created_at: chrono::Utc::now().timestamp(), updated_at: chrono::Utc::now().timestamp(), }; - + let update_result = update_prompt(&app, "plan_mode", "system", &updated_prompt).await; assert!(update_result.is_ok()); - + // Verify the update let loaded = 
load_prompts(&app).await; assert!(loaded.is_ok()); @@ -265,7 +266,7 @@ async fn test_update_prompt() { let prompt = config.get_prompt("plan_mode", "system").unwrap(); assert_eq!(prompt.name, "Updated System Prompt"); assert_eq!(prompt.content, "Updated content"); - + // Test updating non-existent category let result = update_prompt(&app, "non_existent", "system", &updated_prompt).await; assert!(result.is_err()); @@ -275,26 +276,26 @@ async fn test_update_prompt() { #[serial] async fn test_delete_prompt() { let (_temp_dir, app) = create_test_app().await; - + // First save default prompts let config = get_default_prompts(); let save_result = save_prompts(&app, &config).await; assert!(save_result.is_ok()); - + // Delete a prompt let delete_result = delete_prompt(&app, "plan_mode", "user_context").await; assert!(delete_result.is_ok()); - + // Verify deletion let loaded = load_prompts(&app).await; assert!(loaded.is_ok()); let config = loaded.unwrap(); assert!(config.get_prompt("plan_mode", "user_context").is_none()); - + // Test deleting non-existent prompt let result = delete_prompt(&app, "plan_mode", "non_existent").await; assert!(result.is_err()); - + // Test deleting from non-existent category let result = delete_prompt(&app, "non_existent", "system").await; assert!(result.is_err()); @@ -304,27 +305,27 @@ async fn test_delete_prompt() { #[serial] async fn test_create_category() { let (_temp_dir, app) = create_test_app().await; - + // First save default prompts let config = get_default_prompts(); let save_result = save_prompts(&app, &config).await; assert!(save_result.is_ok()); - + // Create a new category let create_result = create_category(&app, "test_category", "Test category description").await; assert!(create_result.is_ok()); - + // Verify creation let loaded = load_prompts(&app).await; assert!(loaded.is_ok()); let config = loaded.unwrap(); assert!(config.categories.contains_key("test_category")); - + let category = 
config.categories.get("test_category").unwrap(); assert_eq!(category.name, "test_category"); assert_eq!(category.description, "Test category description"); assert!(category.enabled); - + // Verify empty prompts collection was created let prompts = config.get_category_prompts("test_category"); assert!(prompts.is_some()); @@ -347,15 +348,15 @@ mod test_edge_cases { created_at: chrono::Utc::now().timestamp(), updated_at: chrono::Utc::now().timestamp(), }; - + let variables = HashMap::new(); let rendered = prompt.render(&variables); assert_eq!(rendered, ""); - + let extracted = prompt.extract_variables(); assert!(extracted.is_empty()); } - + #[test] fn test_malformed_variable_syntax() { let prompt = PromptTemplate { @@ -367,16 +368,16 @@ mod test_edge_cases { created_at: chrono::Utc::now().timestamp(), updated_at: chrono::Utc::now().timestamp(), }; - + let extracted = prompt.extract_variables(); - + // Based on the implementation, it extracts ["incomplete_var} {{", "valid_var"] // This is expected behavior - it finds the pattern correctly but includes malformed parts assert_eq!(extracted.len(), 2); assert!(extracted.contains(&"valid_var".to_string())); assert!(extracted.contains(&"incomplete_var} {{".to_string())); } - + #[test] fn test_nested_braces() { let prompt = PromptTemplate { @@ -388,10 +389,10 @@ mod test_edge_cases { created_at: chrono::Utc::now().timestamp(), updated_at: chrono::Utc::now().timestamp(), }; - + let extracted = prompt.extract_variables(); // The current implementation should handle this gracefully // It might extract "outer_" or "inner" depending on implementation assert!(!extracted.is_empty()); } -} \ No newline at end of file +} diff --git a/src-tauri/src/tests/services/recent_projects.rs b/src-tauri/src/tests/services/recent_projects.rs index 88e96c1..814484e 100644 --- a/src-tauri/src/tests/services/recent_projects.rs +++ b/src-tauri/src/tests/services/recent_projects.rs @@ -17,10 +17,7 @@ mod tests { #[test] fn 
test_recent_projects_dedup_and_mru() { // existing list with A (older), B (newer) - let existing = vec![ - rp("A", "/p/A", 100), - rp("B", "/p/B", 200), - ]; + let existing = vec![rp("A", "/p/A", 100), rp("B", "/p/B", 200)]; // upsert A again with newer timestamp -> A should move to front, no duplicates let updated_a = rp("A", "/p/A", 300); @@ -43,12 +40,15 @@ mod tests { // Upsert a new project P21 with latest ts let new_item = rp("P21", "/p/21", 10_000); - let result = project_service::upsert_recent_projects(existing.drain(..).collect(), new_item, 20); + let result = + project_service::upsert_recent_projects(existing.drain(..).collect(), new_item, 20); assert_eq!(result.len(), 20, "List must be capped at 20"); assert_eq!(result[0].path, "/p/21", "Newest project at front"); // Oldest should have been dropped; ensure "/p/0" is not present - assert!(!result.iter().any(|p| p.path == "/p/0"), "Oldest item should be dropped"); + assert!( + !result.iter().any(|p| p.path == "/p/0"), + "Oldest item should be dropped" + ); } } - diff --git a/src/App.tsx b/src/App.tsx index 3a1721c..53225bf 100644 --- a/src/App.tsx +++ b/src/App.tsx @@ -151,7 +151,8 @@ function AppContent() { // Open via backend (validates, sets cwd, updates recents w/ dedup) and use returned data const opened = await invoke('open_existing_project', { project_path: selectedPath, projectPath: selectedPath }) setCurrentProject(opened) - + setActiveTab('chat') + // Refresh projects list if (projectsRefreshRef.current?.refresh) { projectsRefreshRef.current.refresh() @@ -190,10 +191,11 @@ function AppContent() { git_status: 'clean' } setCurrentProject(newProject) + setActiveTab('chat') } const handleProjectSelect = (project: RecentProject) => { - setActiveTab('code') // Start with code tab when project is selected + setActiveTab('chat') // Default to chat tab when project is selected // Ensure backend marks it active and updates recents and use returned project info invoke('open_existing_project', { 
project_path: project.path, projectPath: project.path }) .then(setCurrentProject) @@ -202,7 +204,7 @@ function AppContent() { const handleBackToWelcome = () => { setCurrentProject(null) - setActiveTab('code') // Reset to code tab when going back to welcome + setActiveTab('chat') // Reset to chat tab when going back to welcome } const toggleChat = () => { @@ -308,7 +310,7 @@ function AppContent() { const recents = await invoke('list_recent_projects') if (recents && recents.length > 0) { setCurrentProject(recents[0]) - setActiveTab('code') // Start with code tab + setActiveTab('chat') // Start with chat tab if (projectsRefreshRef.current?.refresh) { projectsRefreshRef.current.refresh() } @@ -354,7 +356,7 @@ function AppContent() { }) setCurrentProject(opened) - setActiveTab('code') // Start with code tab + setActiveTab('chat') // Start with chat tab // Refresh projects list if (projectsRefreshRef.current?.refresh) { diff --git a/src/components/AIAgentStatusBar.tsx b/src/components/AIAgentStatusBar.tsx index 8c3d7b6..54b8e62 100644 --- a/src/components/AIAgentStatusBar.tsx +++ b/src/components/AIAgentStatusBar.tsx @@ -3,7 +3,7 @@ import { invoke } from '@tauri-apps/api/core'; import { listen } from '@tauri-apps/api/event'; import { useSidebarWidth } from '@/contexts/sidebar-width-context'; import { useSidebar } from '@/components/ui/sidebar'; -import { MessageCircle } from 'lucide-react'; +import { MessageCircle, X } from 'lucide-react'; import { Button } from '@/components/ui/button'; interface AIAgent { @@ -13,6 +13,9 @@ interface AIAgent { available: boolean; enabled: boolean; error_message?: string; + installed_version?: string | null; + latest_version?: string | null; + upgrade_available?: boolean; } interface AgentStatus { @@ -26,16 +29,31 @@ interface AIAgentStatusBarProps { export function AIAgentStatusBar({ onChatToggle, showChatButton }: AIAgentStatusBarProps) { const [agents, setAgents] = useState([]); + const [selectedAgent, setSelectedAgent] = 
useState(null); const { sidebarWidth } = useSidebarWidth(); const { state } = useSidebar(); + useEffect(() => { + if (!selectedAgent) { + return; + } + + const updated = agents.find(agent => agent.name === selectedAgent.name); + if (updated && updated !== selectedAgent) { + setSelectedAgent(updated); + } + }, [agents, selectedAgent]); + useEffect(() => { // Initial check checkAgents(); // Listen for status updates const unlisten = listen('ai-agent-status', (event) => { - setAgents(event.payload.agents); + const status = event.payload; + if (status && Array.isArray(status.agents)) { + setAgents(status.agents); + } }); // Start monitoring @@ -48,8 +66,12 @@ export function AIAgentStatusBar({ onChatToggle, showChatButton }: AIAgentStatus const checkAgents = async () => { try { - const status = await invoke('check_ai_agents'); - setAgents(status.agents); + const status = await invoke('check_ai_agents'); + if (status && Array.isArray(status.agents)) { + setAgents(status.agents); + } else { + setAgents([]); + } } catch (error) { console.error('Failed to check AI agents:', error); } @@ -119,8 +141,9 @@ export function AIAgentStatusBar({ onChatToggle, showChatButton }: AIAgentStatus return (

setSelectedAgent(current => current?.name === agent.name ? null : agent)} >
Checking... )}
+ + {selectedAgent && ( + setSelectedAgent(null)} + /> + )} +
+ ); +} + +interface AgentVersionCardProps { + agent: AIAgent; + onClose: () => void; +} + +const upgradeHints: Record = { + claude: { + command: 'npm install -g @anthropic-ai/claude-code', + packageName: '@anthropic-ai/claude-code', + }, + codex: { + command: 'npm install -g @openai/codex', + packageName: '@openai/codex', + }, + gemini: { + command: 'npm install -g @google/gemini-cli@latest', + packageName: '@google/gemini-cli', + }, +}; + +function AgentVersionCard({ agent, onClose }: AgentVersionCardProps) { + const hint = upgradeHints[agent.name]; + const installedVersion = agent.installed_version ?? 'Not detected'; + const latestVersion = agent.latest_version ?? 'Unknown'; + const showUpgrade = agent.upgrade_available; + + return ( +
+
+
+

{agent.display_name}

+

Command: {agent.command}

+
+ +
+ +
+ Installed: {installedVersion} + Latest: {latestVersion} + {!agent.available && ( + Agent not detected on PATH. + )} + {agent.error_message && ( + {agent.error_message} + )} +
+ + {hint && ( +
+ {showUpgrade ? ( +

+ New version available — run {hint.command} to upgrade. +

+ ) : ( +

Agent is up to date.

+ )} +
+ )}
); } diff --git a/src/components/ChatInterface.tsx b/src/components/ChatInterface.tsx index 6861c1f..1b58d59 100644 --- a/src/components/ChatInterface.tsx +++ b/src/components/ChatInterface.tsx @@ -23,6 +23,7 @@ import type { ChatMessage } from '@/components/chat/types'; import type { SessionStatus } from '@/components/chat/types'; import { useChatExecution } from '@/components/chat/hooks/useChatExecution'; import { ClaudeStreamParser } from '@/components/chat/stream/claudeStreamParser' +import { CodexStreamParser } from '@/components/chat/codex/streamParser' import { setStepStatus, updateMessagesPlanStep } from '@/components/chat/planStatus'; import { buildAutocompleteOptions } from '@/components/chat/autocomplete'; import type { Plan } from '@/components/chat/plan'; @@ -507,7 +508,34 @@ export function ChatInterface({ isOpen, selectedAgent, project }: ChatInterfaceP setCommandType(null); const permissionMode = planModeEnabled ? 'plan' : 'acceptEdits' - await execute(agentToUse || selectedAgent || 'claude', messageToSend, executionMode, unsafeFull, permissionMode as any) + let approvalMode: 'default' | 'auto_edit' | 'yolo' | undefined + + if (targetId === 'gemini') { + try { + const cliSettings = await invoke('load_agent_cli_settings', { agent: 'gemini', projectPath: project.path }) + const directDefault = cliSettings?.approvalDefault || cliSettings?.approval_default + if (typeof directDefault === 'string') { + const normalized = directDefault.replace('-', '_') as 'default' | 'auto_edit' | 'yolo' + approvalMode = normalized + } else if (agentSettings?.gemini?.auto_approval) { + approvalMode = 'auto_edit' + } + } catch (error) { + console.error('Failed to load Gemini CLI settings:', error) + if (agentSettings?.gemini?.auto_approval) { + approvalMode = 'auto_edit' + } + } + } + + await execute( + agentToUse || selectedAgent || 'claude', + messageToSend, + executionMode, + unsafeFull, + permissionMode as any, + approvalMode + ) }; // Handle plan execution @@ -697,11 
+725,10 @@ Please focus only on this step.`; setMessages(prev => prev.map(msg => { if (msg.id !== chunk.session_id) return msg - // Detect if this is a Claude session and chunk looks like JSON stream const agentId = (msg.agent || '').toLowerCase() - const isClaude = agentId.includes('claude') const looksJson = chunk.content.trim().startsWith('{') || chunk.content.includes('"type"') - if (isClaude && looksJson) { + + if (agentId.includes('claude') && looksJson) { let parser = parsers.get(chunk.session_id) if (!parser) { parser = new ClaudeStreamParser('claude') @@ -710,6 +737,23 @@ Please focus only on this step.`; const delta = parser.feed(chunk.content) return { ...msg, content: msg.content + delta, isStreaming: !chunk.finished } } + + if (agentId.includes('codex') && looksJson) { + if (!(window as any).__codexParsers) (window as any).__codexParsers = new Map() + const codexParsers: Map = (window as any).__codexParsers + let parser = codexParsers.get(chunk.session_id) + if (!parser) { + parser = new CodexStreamParser() + codexParsers.set(chunk.session_id, parser) + } + const text = parser.feed(chunk.content) + if (text !== undefined) { + return { ...msg, content: text, isStreaming: !chunk.finished } + } + // If parser returns undefined (filtered event), don't append raw content + return msg + } + return { ...msg, content: msg.content + chunk.content, isStreaming: !chunk.finished } })) if (chunk.finished) { @@ -733,6 +777,10 @@ Please focus only on this step.`; const parsers: Map = (window as any).__claudeParsers parsers?.delete(chunk.session_id) } catch {} + try { + const codexParsers: Map = (window as any).__codexParsers + codexParsers?.delete(chunk.session_id) + } catch {} } }, onError: (message) => { diff --git a/src/components/__tests__/AIAgentStatusBar.versionInfo.test.tsx b/src/components/__tests__/AIAgentStatusBar.versionInfo.test.tsx new file mode 100644 index 0000000..3827095 --- /dev/null +++ 
b/src/components/__tests__/AIAgentStatusBar.versionInfo.test.tsx @@ -0,0 +1,79 @@ +import { render, screen, fireEvent, waitFor } from '@testing-library/react' +import React from 'react' + +const mockInvoke = vi.fn() + +vi.mock('@tauri-apps/api/core', () => ({ + invoke: mockInvoke, +})) + +const listeners: Record void> = {} + +vi.mock('@tauri-apps/api/event', () => ({ + listen: vi.fn(async (event: string, handler: any) => { + listeners[event] = handler + return () => {} + }), +})) + +vi.mock('@/contexts/sidebar-width-context', () => ({ + useSidebarWidth: () => ({ sidebarWidth: 240 }), +})) + +vi.mock('@/components/ui/sidebar', () => ({ + useSidebar: () => ({ state: 'expanded' }), +})) + +vi.mock('@/components/ui/button', () => ({ + Button: ({ children, ...props }: any) => ( + + ), +})) + +describe('AIAgentStatusBar version info', () => { + beforeEach(() => { + mockInvoke.mockReset() + Object.keys(listeners).forEach((key) => delete listeners[key]) + }) + + it('shows upgrade prompt with version details when agent is clicked', async () => { + mockInvoke.mockImplementation(async (cmd: string) => { + if (cmd === 'check_ai_agents') { + return { + agents: [ + { + name: 'claude', + command: 'claude', + display_name: 'Claude Code CLI', + available: true, + enabled: true, + error_message: null, + installed_version: '1.0.0', + latest_version: '1.2.0', + upgrade_available: true, + }, + ], + } + } + if (cmd === 'monitor_ai_agents') { + return {} + } + throw new Error(`Unexpected command ${cmd}`) + }) + + const { AIAgentStatusBar } = await import('@/components/AIAgentStatusBar') + + render() + + await screen.findByText('Claude Code CLI') + + fireEvent.click(screen.getByText('Claude Code CLI')) + + await waitFor(() => { + expect(screen.getByText(/Installed:/i)).toBeInTheDocument() + expect(screen.getByText(/Latest:/i)).toBeInTheDocument() + expect(screen.getByText(/New version available/i)).toBeInTheDocument() + expect(screen.getByText(/npm install -g 
@anthropic-ai\/claude-code/i)).toBeInTheDocument() + }) + }) +}) diff --git a/src/components/__tests__/App.projectSelection.chatDefault.test.tsx b/src/components/__tests__/App.projectSelection.chatDefault.test.tsx new file mode 100644 index 0000000..07a71c4 --- /dev/null +++ b/src/components/__tests__/App.projectSelection.chatDefault.test.tsx @@ -0,0 +1,141 @@ +import { describe, it, expect, vi, beforeEach } from 'vitest' +import { render, screen, waitFor, fireEvent } from '@testing-library/react' +import App from '@/App' + +const project = { + name: 'Sample Project', + path: '/projects/sample', + last_accessed: Math.floor(Date.now() / 1000), + is_git_repo: true, + git_branch: 'main', + git_status: 'clean', +} + +const tauriCore = vi.hoisted(() => ({ + invoke: vi.fn(), +})) + +vi.mock('@tauri-apps/api/core', () => tauriCore) +vi.mock('@tauri-apps/api/event', () => ({ listen: vi.fn(async () => () => {}) })) +vi.mock('@/components/ChatInterface', () => ({ ChatInterface: () =>
})) +vi.mock('@/components/CodeView', () => ({ CodeView: () =>
})) +vi.mock('@/components/HistoryView', () => ({ HistoryView: () =>
})) +vi.mock('@/components/AIAgentStatusBar', () => ({ AIAgentStatusBar: () =>
})) +vi.mock('@/components/ui/tabs', () => { + const React = require('react') + const TabsContext = React.createContext<{ value: string; onValueChange?: (value: string) => void } | null>(null) + + const Tabs = ({ value, onValueChange, children }: any) => ( + +
{children}
+
+ ) + + const TabsList = ({ children, ...props }: any) => ( +
{children}
+ ) + + const TabsTrigger = ({ value, children, ...props }: any) => { + const context = React.useContext(TabsContext) + if (!context) { + throw new Error('TabsTrigger must be used within Tabs') + } + const isActive = context.value === value + return ( + + ) + } + + const TabsContent = ({ value, children, forceMount, ...props }: any) => { + const context = React.useContext(TabsContext) + if (!context) { + throw new Error('TabsContent must be used within Tabs') + } + if (!forceMount && context.value !== value) return null + return ( +
+ {children} +
+ ) + } + + return { Tabs, TabsList, TabsTrigger, TabsContent } +}) + +const defaultSettings = { + show_console_output: true, + projects_folder: '', + file_mentions_enabled: true, + show_welcome_recent_projects: true, + chat_send_shortcut: 'mod+enter', + code_settings: { theme: 'github', font_size: 14 }, + ui_theme: 'auto', +} + +if (typeof window !== 'undefined' && !window.matchMedia) { + Object.defineProperty(window, 'matchMedia', { + writable: true, + value: vi.fn().mockImplementation(() => ({ + matches: false, + addEventListener: vi.fn(), + removeEventListener: vi.fn(), + })), + }) +} + +if (typeof document !== 'undefined') describe('App project selection default tab', () => { + beforeEach(() => { + const invoke = tauriCore.invoke as unknown as ReturnType + invoke.mockReset() + invoke.mockImplementation(async (cmd: string) => { + switch (cmd) { + case 'load_app_settings': + return defaultSettings + case 'list_recent_projects': + return [project] + case 'refresh_recent_projects': + return [project] + case 'open_existing_project': + return project + case 'get_cli_project_path': + return null + case 'clear_cli_project_path': + return null + case 'get_user_home_directory': + return '/projects' + case 'set_window_theme': + case 'add_project_to_recent': + case 'save_app_settings': + return null + default: + return null + } + }) + }) + + it('activates the chat tab after selecting a recent project', async () => { + render() + + const projectButton = await screen.findByRole('button', { name: /Sample Project/i }) + fireEvent.click(projectButton) + + const chatTab = await screen.findByRole('tab', { name: /Chat/i }) + + await waitFor(() => { + expect(chatTab).toHaveAttribute('data-state', 'active') + }) + + const codeTab = screen.getByRole('tab', { name: /Code/i }) + expect(codeTab).toHaveAttribute('data-state', 'inactive') + + expect(await screen.findByTestId('chat-interface')).toBeInTheDocument() + }) +}) diff --git a/src/components/chat/MessagesList.tsx 
b/src/components/chat/MessagesList.tsx index d959660..405b87f 100644 --- a/src/components/chat/MessagesList.tsx +++ b/src/components/chat/MessagesList.tsx @@ -2,6 +2,7 @@ import { User, Bot, Loader2, Copy, Expand, Shrink } from 'lucide-react' import { getAgentId } from '@/components/chat/agents' import { PlanBreakdown } from '@/components/PlanBreakdown' import { AgentResponse } from './AgentResponse' +import { CodexRenderer } from './codex/CodexRenderer' import { useToast } from '@/components/ToastProvider' export interface ChatMessageLike { @@ -63,11 +64,41 @@ export function MessagesList(props: MessagesListProps) { return ( <> - {messages.map((message) => ( + {messages.map((message) => { + const agentId = getAgentId(message.agent) + const isCodex = agentId === 'codex' + const isAssistant = message.role === 'assistant' + + // Codex assistant messages: no box, no padding, full width + if (message.role === 'assistant' && isCodex) { + return ( +
+
+ + Codex + + {new Date(message.timestamp).toLocaleTimeString()} + {message.isStreaming && ( + + )} +
+
+ {(() => { + const content = message.content || '' + if (!content && message.isStreaming) return 'Thinking...' + return + })()} +
+
+ ) + } + + // User messages and non-Codex agents: keep box styling + return (
@@ -109,10 +140,14 @@ export function MessagesList(props: MessagesListProps) { const expanded = expandedMessages.has(message.id) const compact = long && !expanded const containerClass = `whitespace-pre-wrap text-sm ${compact ? 'max-h-[200px] overflow-hidden' : ''}` + return (
{(() => { if (!content && message.isStreaming) return 'Thinking...' + if (message.role === 'assistant' && isCodex) { + return + } return })()}
@@ -159,7 +194,8 @@ export function MessagesList(props: MessagesListProps) { )}
- ))} + ) + })} ) } diff --git a/src/components/chat/__tests__/ChatInterface.agentModes.test.tsx b/src/components/chat/__tests__/ChatInterface.agentModes.test.tsx index 95f6177..81823d0 100644 --- a/src/components/chat/__tests__/ChatInterface.agentModes.test.tsx +++ b/src/components/chat/__tests__/ChatInterface.agentModes.test.tsx @@ -40,7 +40,10 @@ vi.mock('@tauri-apps/api/core', () => ({ const project = { name: 'demo', path: '/tmp/demo', last_accessed: 0, is_git_repo: true, git_branch: 'main', git_status: 'clean' } if (typeof document !== 'undefined') describe('Agent-specific modes in dropdown', () => { - beforeEach(() => { lastArgs = null }) + beforeEach(() => { + lastArgs = null + Element.prototype.scrollIntoView = vi.fn() + }) it('Claude: selecting Plan mode sends permissionMode=plan', async () => { render( @@ -79,4 +82,3 @@ if (typeof document !== 'undefined') describe('Agent-specific modes in dropdown' expect(lastArgs).toHaveProperty('approvalMode') }) }) - diff --git a/src/components/chat/__tests__/ChatInterface.codexStream.test.tsx b/src/components/chat/__tests__/ChatInterface.codexStream.test.tsx new file mode 100644 index 0000000..bfbf630 --- /dev/null +++ b/src/components/chat/__tests__/ChatInterface.codexStream.test.tsx @@ -0,0 +1,111 @@ +import { describe, it, expect, vi, beforeEach } from 'vitest' +import { render, screen, fireEvent, waitFor } from '@testing-library/react' +import { ToastProvider } from '@/components/ToastProvider' +import { ChatInterface } from '@/components/ChatInterface' + +let streamCb: ((e: { payload: { session_id: string; content: string; finished: boolean } }) => void) | null = null + +const mocks = vi.hoisted(() => ({ + invoke: vi.fn(), +})) + +vi.mock('@tauri-apps/api/event', () => { + return { + listen: vi.fn(async (event: string, cb: any) => { + if (event === 'cli-stream') streamCb = cb + return () => {} + }), + } +}) + +const project = { + name: 'demo', + path: '/tmp/demo', + last_accessed: 0, + is_git_repo: true, + 
git_branch: 'main', + git_status: 'clean', +} + +vi.mock('@tauri-apps/api/core', () => { + const handler = vi.fn(async (cmd: string, args: any) => { + switch (cmd) { + case 'load_all_agent_settings': + return { + claude: { enabled: true, sandbox_mode: false, auto_approval: false, session_timeout_minutes: 30, output_format: 'text', debug_mode: false }, + codex: { enabled: true, sandbox_mode: false, auto_approval: false, session_timeout_minutes: 30, output_format: 'text', debug_mode: false }, + gemini: { enabled: true, sandbox_mode: false, auto_approval: false, session_timeout_minutes: 30, output_format: 'text', debug_mode: false }, + test: { enabled: true, sandbox_mode: false, auto_approval: false, session_timeout_minutes: 30, output_format: 'text', debug_mode: false }, + max_concurrent_sessions: 10, + } + case 'load_agent_settings': + return { claude: true, codex: true, gemini: true, test: true } + case 'get_active_sessions': + return { active_sessions: [], total_sessions: 0 } + case 'load_sub_agents_grouped': + return {} + case 'load_prompts': + return { prompts: {} } + case 'get_git_worktree_preference': + return true + case 'get_git_worktrees': + return [] + case 'save_project_chat': + return null + case 'execute_codex_command': + return null + default: + return null + } + }) + mocks.invoke.mockImplementation(handler) + return { invoke: (...args: any[]) => mocks.invoke(...args) } +}) + +if (typeof document !== 'undefined') describe('ChatInterface codex streaming', () => { + beforeEach(() => { + vi.clearAllMocks() + streamCb = null + mocks.invoke.mockClear() + // jsdom lacks scrollIntoView + // @ts-ignore + Element.prototype.scrollIntoView = vi.fn() + }) + + it('renders agent message content from codex JSON events', async () => { + render( + +
+ {}} selectedAgent={undefined} project={project as any} /> +
+
+ ) + + const input = screen.getByRole('textbox') + fireEvent.change(input, { target: { value: '/codex how are you?' } }) + fireEvent.keyDown(input, { key: 'Enter' }) + + await waitFor(() => { + expect(mocks.invoke).toHaveBeenCalledWith('execute_codex_command', expect.objectContaining({ message: 'how are you?' })) + }) + const call = mocks.invoke.mock.calls.find(([cmd]: [string]) => cmd === 'execute_codex_command') + expect(call).toBeTruthy() + const sessionId = call![1].sessionId as string + expect(typeof sessionId).toBe('string') + + const eventPayload = { + type: 'item.completed', + item: { + type: 'agent_message', + id: 'msg_1', + text: 'Hello from Codex', + }, + } + + streamCb?.({ payload: { session_id: sessionId, content: JSON.stringify(eventPayload), finished: false } }) + + await waitFor(() => { + expect(screen.getByText('Hello from Codex')).toBeInTheDocument() + }) + }) +}) diff --git a/src/components/chat/codex/CodeBlock.tsx b/src/components/chat/codex/CodeBlock.tsx new file mode 100644 index 0000000..53ca45e --- /dev/null +++ b/src/components/chat/codex/CodeBlock.tsx @@ -0,0 +1,141 @@ +import { Button } from '@/components/ui/button' +import { cn } from '@/lib/utils' +import { CheckIcon, CopyIcon } from 'lucide-react' +import type { ComponentProps, HTMLAttributes, ReactNode } from 'react' +import { createContext, useContext, useState } from 'react' +import { Prism as SyntaxHighlighter } from 'react-syntax-highlighter' +import { oneDark, oneLight } from 'react-syntax-highlighter/dist/esm/styles/prism' + +type CodeBlockContextType = { + code: string +} + +const CodeBlockContext = createContext({ + code: '', +}) + +export type CodeBlockProps = HTMLAttributes & { + code: string + language: string + showLineNumbers?: boolean + children?: ReactNode +} + +export const CodeBlock = ({ + code, + language, + showLineNumbers = false, + className, + children, + ...props +}: CodeBlockProps) => ( + +
+
+ + {code} + + + {code} + + {children && ( +
{children}
+ )} +
+
+
+) + +export type CodeBlockCopyButtonProps = ComponentProps & { + onCopy?: () => void + onError?: (error: Error) => void + timeout?: number +} + +export const CodeBlockCopyButton = ({ + onCopy, + onError, + timeout = 2000, + children, + className, + ...props +}: CodeBlockCopyButtonProps) => { + const [isCopied, setIsCopied] = useState(false) + const { code } = useContext(CodeBlockContext) + + const copyToClipboard = async () => { + if (typeof window === 'undefined' || !navigator.clipboard.writeText) { + onError?.(new Error('Clipboard API not available')) + return + } + + try { + await navigator.clipboard.writeText(code) + setIsCopied(true) + onCopy?.() + setTimeout(() => setIsCopied(false), timeout) + } catch (error) { + onError?.(error as Error) + } + } + + const Icon = isCopied ? CheckIcon : CopyIcon + + return ( + + ) +} diff --git a/src/components/chat/codex/CodexRenderer.tsx b/src/components/chat/codex/CodexRenderer.tsx new file mode 100644 index 0000000..492a3ba --- /dev/null +++ b/src/components/chat/codex/CodexRenderer.tsx @@ -0,0 +1,55 @@ +import React from 'react' +import { Response } from './Response' +import { Reasoning, ReasoningContent, ReasoningTrigger } from './Reasoning' + +interface CodexRendererProps { + content: string + isStreaming?: boolean +} + +interface ParsedCodex { + reasoning: string[] + response: string +} + +function parseCodexContent(content: string): ParsedCodex { + const blocks = content.split('\n\n') + const reasoning: string[] = [] + const responseParts: string[] = [] + + for (const block of blocks) { + if (!block.trim()) continue + + // Reasoning blocks are wrapped in underscores (italic markdown) + if (block.startsWith('_') && block.endsWith('_')) { + reasoning.push(block.slice(1, -1).trim()) + } else { + responseParts.push(block) + } + } + + return { + reasoning, + response: responseParts.join('\n\n'), + } +} + +export function CodexRenderer({ content, isStreaming = false }: CodexRendererProps) { + const parsed = 
parseCodexContent(content) + + return ( +
+ {parsed.reasoning.length > 0 && ( + + + + {parsed.reasoning.map((r, i) => ( +
{r}
+ ))} +
+
+ )} + {parsed.response && {parsed.response}} +
+ ) +} diff --git a/src/components/chat/codex/Reasoning.tsx b/src/components/chat/codex/Reasoning.tsx new file mode 100644 index 0000000..9546e05 --- /dev/null +++ b/src/components/chat/codex/Reasoning.tsx @@ -0,0 +1,159 @@ +import { useControllableState } from '@radix-ui/react-use-controllable-state' +import { Collapsible, CollapsibleContent, CollapsibleTrigger } from '@/components/ui/collapsible' +import { cn } from '@/lib/utils' +import { ChevronDownIcon } from 'lucide-react' +import type { ComponentProps } from 'react' +import { createContext, memo, useContext, useEffect, useState } from 'react' +import { Response } from './Response' + +type ReasoningContextValue = { + isStreaming: boolean + isOpen: boolean + setIsOpen: (open: boolean) => void + duration: number +} + +const ReasoningContext = createContext(null) + +const useReasoning = () => { + const context = useContext(ReasoningContext) + if (!context) { + throw new Error('Reasoning components must be used within Reasoning') + } + return context +} + +export type ReasoningProps = ComponentProps & { + isStreaming?: boolean + open?: boolean + defaultOpen?: boolean + onOpenChange?: (open: boolean) => void + duration?: number +} + +const AUTO_CLOSE_DELAY = 1000 +const MS_IN_S = 1000 + +export const Reasoning = memo( + ({ + className, + isStreaming = false, + open, + defaultOpen = true, + onOpenChange, + duration: durationProp, + children, + ...props + }: ReasoningProps) => { + const [isOpen, setIsOpen] = useControllableState({ + prop: open, + defaultProp: defaultOpen, + onChange: onOpenChange, + }) + const [duration, setDuration] = useControllableState({ + prop: durationProp, + defaultProp: 0, + }) + + const [hasAutoClosed, setHasAutoClosed] = useState(false) + const [startTime, setStartTime] = useState(null) + + // Track duration when streaming starts and ends + useEffect(() => { + if (isStreaming) { + if (startTime === null) { + setStartTime(Date.now()) + } + } else if (startTime !== null) { + 
setDuration(Math.ceil((Date.now() - startTime) / MS_IN_S)) + setStartTime(null) + } + }, [isStreaming, startTime, setDuration]) + + // Auto-open when streaming starts, auto-close when streaming ends (once only) + useEffect(() => { + if (defaultOpen && !isStreaming && isOpen && !hasAutoClosed) { + // Add a small delay before closing to allow user to see the content + const timer = setTimeout(() => { + setIsOpen(false) + setHasAutoClosed(true) + }, AUTO_CLOSE_DELAY) + + return () => clearTimeout(timer) + } + }, [isStreaming, isOpen, defaultOpen, setIsOpen, hasAutoClosed]) + + const handleOpenChange = (newOpen: boolean) => { + setIsOpen(newOpen) + } + + return ( + + + {children} + + + ) + } +) + +export type ReasoningTriggerProps = ComponentProps + +const getThinkingMessage = (isStreaming: boolean, duration?: number) => { + if (isStreaming || duration === 0) { + return

Thinking...

+ } + if (duration === undefined) { + return

Thought for a few seconds

+ } + return

Thought for {duration} seconds

+} + +export const ReasoningTrigger = memo(({ className, children, ...props }: ReasoningTriggerProps) => { + const { isStreaming, isOpen, duration } = useReasoning() + + return ( + + {children ?? ( + <> + {getThinkingMessage(isStreaming, duration)} + + + )} + + ) +}) + +export type ReasoningContentProps = ComponentProps & { + children: React.ReactNode +} + +export const ReasoningContent = memo(({ className, children, ...props }: ReasoningContentProps) => ( + + {children} + +)) + +Reasoning.displayName = 'Reasoning' +ReasoningTrigger.displayName = 'ReasoningTrigger' +ReasoningContent.displayName = 'ReasoningContent' diff --git a/src/components/chat/codex/Response.tsx b/src/components/chat/codex/Response.tsx new file mode 100644 index 0000000..061579e --- /dev/null +++ b/src/components/chat/codex/Response.tsx @@ -0,0 +1,129 @@ +import { cn } from '@/lib/utils' +import React from 'react' +import ReactMarkdown from 'react-markdown' +import remarkGfm from 'remark-gfm' +import { CodeBlock, CodeBlockCopyButton } from './CodeBlock' +import { invoke } from '@tauri-apps/api/core' + +export type ResponseProps = React.HTMLAttributes & { + children: React.ReactNode + parseIncompleteMarkdown?: boolean + components?: Record> + allowedImagePrefixes?: string[] + allowedLinkPrefixes?: string[] + defaultOrigin?: string + rehypePlugins?: any[] + remarkPlugins?: any[] +} + +function closeUnfinishedCodeFences(md: string): string { + const fenceCount = (md.match(/```/g) || []).length + if (fenceCount % 2 === 1) return md + '\n```' + return md +} + +function isAllowed(uri: string, allowed: string[] | undefined): boolean { + if (!allowed || allowed.length === 0) return false + if (allowed.includes('*')) return true + return allowed.some((p) => uri.startsWith(p)) +} + +export const Response: React.FC = ({ + className, + children, + parseIncompleteMarkdown = true, + components, + allowedImagePrefixes = ['*'], + allowedLinkPrefixes = ['*', 'file://'], + defaultOrigin, + rehypePlugins = [], + 
remarkPlugins = [], + ...divProps +}) => { + const raw = typeof children === 'string' ? children : '' + const content = parseIncompleteMarkdown ? closeUnfinishedCodeFences(raw || '') : raw || '' + + const handleFileClick = async (filePath: string, e: React.MouseEvent) => { + e.preventDefault() + try { + await invoke('open_file_in_editor', { filePath }) + } catch (error) { + console.error('Failed to open file:', error) + } + } + + const mergedComponents = { + code: ({ inline, className, children, ...props }: any) => { + const match = /language-(\w+)/.exec(className || '') + if (!inline && match) { + return ( + + + + ) + } + return ( + + {children} + + ) + }, + a: ({ href = '', children, ...props }: any) => { + const safe = typeof href === 'string' && isAllowed(href, allowedLinkPrefixes) + + // Handle file:// links + if (typeof href === 'string' && href.startsWith('file://')) { + const filePath = href.replace('file://', '') + return ( + handleFileClick(filePath, e)} + className="text-blue-500 hover:text-blue-700 underline cursor-pointer" + {...props} + > + {children} + + ) + } + + return ( + + {children} + + ) + }, + img: ({ src = '', alt = '', ...props }: any) => { + const safe = typeof src === 'string' && isAllowed(src, allowedImagePrefixes) + if (!safe) return null + return {alt} + }, + ul: ({ children }: any) =>
    {children}
, + ol: ({ children }: any) => ( +
    {children}
+ ), + li: ({ children }: any) =>
  • {children}
  • , + p: ({ children }: any) =>

    {children}

    , + strong: ({ children }: any) => {children}, + em: ({ children }: any) => {children}, + ...(components || {}), + } as any + + return ( +
    p]:leading-normal [&>p]:my-0 prose prose-sm max-w-none', className)} + {...divProps} + > + {typeof children === 'string' ? ( + + {content} + + ) : ( + children + )} +
    + ) +} diff --git a/src/components/chat/codex/ThinkingDots.tsx b/src/components/chat/codex/ThinkingDots.tsx new file mode 100644 index 0000000..732638f --- /dev/null +++ b/src/components/chat/codex/ThinkingDots.tsx @@ -0,0 +1,37 @@ +import React, { useEffect, useState } from 'react' +import { cn } from '@/lib/utils' + +export interface ThinkingDotsProps extends React.HTMLAttributes { + intervalMs?: number + prefix?: string +} + +// Em‑dash blink indicator: shows and hides — at a steady cadence +export const ThinkingDots: React.FC = ({ + className, + intervalMs = 500, + prefix = '', + ...divProps +}) => { + const [show, setShow] = useState(true) + + useEffect(() => { + const id = setInterval(() => setShow((s) => !s), intervalMs) + return () => clearInterval(id) + }, [intervalMs]) + + return ( +
    + {prefix} + + — + +
    + ) +} + +export default ThinkingDots diff --git a/src/components/chat/codex/__tests__/streamParser.test.ts b/src/components/chat/codex/__tests__/streamParser.test.ts new file mode 100644 index 0000000..c85bd9c --- /dev/null +++ b/src/components/chat/codex/__tests__/streamParser.test.ts @@ -0,0 +1,56 @@ +import { describe, it, expect } from 'vitest' +import { CodexStreamParser } from '../streamParser' + +describe('CodexStreamParser', () => { + it('parses agent item events', () => { + const parser = new CodexStreamParser() + const payload = JSON.stringify({ + type: 'item.completed', + item: { + type: 'agent_message', + id: 'msg_1', + text: 'Hello from Codex', + }, + }) + + expect(parser.feed(payload)).toBe('Hello from Codex') + }) + + it('accumulates response delta events', () => { + const parser = new CodexStreamParser() + + expect( + parser.feed( + 'data: {"type":"response.output_text.delta","delta":{"text":"Hello"}}' + ) + ).toBe('Hello') + + expect( + parser.feed( + 'data: {"type":"response.output_text.delta","delta":{"text":" world"}}' + ) + ).toBe('Hello world') + }) + + it('uses response.completed payload for final text', () => { + const parser = new CodexStreamParser() + + parser.feed('data: {"type":"response.output_text.delta","delta":"Hello"}') + + expect( + parser.feed( + 'data: {"type":"response.completed","response":{"output":[{"type":"output_text","text":"Hello world"}]}}' + ) + ).toBe('Hello world') + }) + + it('handles error payloads gracefully', () => { + const parser = new CodexStreamParser() + + expect( + parser.feed( + 'data: {"type":"response.error","error":{"message":"Agent failed"}}' + ) + ).toBe('Agent failed') + }) +}) diff --git a/src/components/chat/codex/events.ts b/src/components/chat/codex/events.ts new file mode 100644 index 0000000..f5eb604 --- /dev/null +++ b/src/components/chat/codex/events.ts @@ -0,0 +1,110 @@ +import type { ThreadItem } from './items' + +export type ThreadStartedEvent = { + type: 'thread.started' + thread_id: 
string +} + +export type TurnStartedEvent = { + type: 'turn.started' +} + +export type Usage = { + input_tokens: number + cached_input_tokens: number + output_tokens: number +} + +export type TurnCompletedEvent = { + type: 'turn.completed' + usage: Usage +} + +export type TurnFailedEvent = { + type: 'turn.failed' + error: ThreadError +} + +export type ItemStartedEvent = { + type: 'item.started' + item: ThreadItem +} + +export type ItemUpdatedEvent = { + type: 'item.updated' + item: ThreadItem +} + +export type ItemCompletedEvent = { + type: 'item.completed' + item: ThreadItem +} + +export type ThreadError = { + message: string +} + +export type ThreadErrorEvent = { + type: 'error' + message: string +} + +type DeltaContent = { + type?: string + text?: string +} + +type ResponseDeltaPayload = + | string + | { + text?: string + delta?: string + content?: DeltaContent[] + } + | Array + +export type ResponseOutputTextDeltaEvent = { + type: 'response.output_text.delta' + delta: ResponseDeltaPayload +} + +export type ResponseDeltaEvent = { + type: 'response.delta' + delta: ResponseDeltaPayload +} + +type ResponseOutput = { + type?: string + text?: string +} + +type ResponseEnvelope = { + text?: string + output?: ResponseOutput[] +} + +export type ResponseCompletedEvent = { + type: 'response.completed' + response?: ResponseEnvelope +} + +export type ResponseErrorEvent = { + type: 'response.error' + error?: { + message?: string + } +} + +export type ThreadEvent = + | ThreadStartedEvent + | TurnStartedEvent + | TurnCompletedEvent + | TurnFailedEvent + | ItemStartedEvent + | ItemUpdatedEvent + | ItemCompletedEvent + | ThreadErrorEvent + | ResponseOutputTextDeltaEvent + | ResponseDeltaEvent + | ResponseCompletedEvent + | ResponseErrorEvent diff --git a/src/components/chat/codex/items.ts b/src/components/chat/codex/items.ts new file mode 100644 index 0000000..b373c0d --- /dev/null +++ b/src/components/chat/codex/items.ts @@ -0,0 +1,81 @@ +export type CommandExecutionStatus = 
/** Lifecycle states for a shell command run by the agent. */
export type CommandExecutionStatus = 'in_progress' | 'completed' | 'failed'

/** A shell command the agent executed, with its captured output. */
export type CommandExecutionItem = {
  id: string
  type: 'command_execution'
  command: string
  aggregated_output: string
  exit_code?: number
  status: CommandExecutionStatus
}

/** How a patch touched a file. */
export type PatchChangeKind = 'add' | 'delete' | 'update'

export type FileUpdateChange = {
  path: string
  kind: PatchChangeKind
}

export type PatchApplyStatus = 'completed' | 'failed'

/** A batch of file edits applied by the agent in one patch. */
export type FileChangeItem = {
  id: string
  type: 'file_change'
  changes: FileUpdateChange[]
  status: PatchApplyStatus
}

export type McpToolCallStatus = 'in_progress' | 'completed' | 'failed'

/** An MCP tool invocation, identified by server and tool name. */
export type McpToolCallItem = {
  id: string
  type: 'mcp_tool_call'
  server: string
  tool: string
  status: McpToolCallStatus
}

/** A plain assistant message. */
export type AgentMessageItem = {
  id: string
  type: 'agent_message'
  text: string
}

/** A reasoning ("thinking") block emitted by the model. */
export type ReasoningItem = {
  id: string
  type: 'reasoning'
  text: string
}

/** A web search performed by the agent. */
export type WebSearchItem = {
  id: string
  type: 'web_search'
  query: string
}

/** An error surfaced as a thread item. */
export type ErrorItem = {
  id: string
  type: 'error'
  message: string
}

/** One entry in the agent's todo list. */
export type TodoItem = {
  text: string
  completed: boolean
}

export type TodoListItem = {
  id: string
  type: 'todo_list'
  items: TodoItem[]
}

/** Discriminated union (on `type`) of every thread item variant. */
export type ThreadItem =
  | AgentMessageItem
  | ReasoningItem
  | CommandExecutionItem
  | FileChangeItem
  | McpToolCallItem
  | WebSearchItem
  | TodoListItem
  | ErrorItem
number; cached_input_tokens: number; output_tokens: number } | null, + threadId: null as string | null, + } + + feed(raw: string): string | undefined { + const normalized = this.normalize(raw) + if (!normalized) return undefined + + let event: ThreadEvent | ResponseDeltaEvent | ResponseOutputTextDeltaEvent | ResponseCompletedEvent | ResponseErrorEvent | Record + try { + event = JSON.parse(normalized) + } catch { + return undefined + } + + return this.handleEvent(event) + } + + private handleItem(item: ThreadItem): string | undefined { + switch (item.type) { + case 'agent_message': + this.sections.messages.push(item.text) + return this.buildOutput() + + case 'reasoning': + this.sections.reasoning.push(item.text) + return this.buildOutput() + + case 'command_execution': + const cmdOutput = item.aggregated_output + ? `\n\`\`\`sh\n$ ${item.command}\n${item.aggregated_output}\n\`\`\`` + : '' + this.sections.messages.push( + `• ${item.command}${cmdOutput}` + ) + return this.buildOutput() + + case 'file_change': + const changes = item.changes.map(c => { + const action = c.kind === 'add' ? 'Created' : c.kind === 'delete' ? 'Deleted' : 'Modified' + return `• ${action} [${c.path}](file://${c.path})` + }).join('\n') + this.sections.messages.push(changes) + return this.buildOutput() + + case 'mcp_tool_call': + this.sections.messages.push( + `• ${item.server}/${item.tool}` + ) + return this.buildOutput() + + case 'web_search': + this.sections.messages.push( + `• Searching: ${item.query}` + ) + return this.buildOutput() + + case 'todo_list': + const todos = item.items.map(t => + `${t.completed ? 
'◎' : '◯'} ${t.text}` + ).join('\n') + this.sections.messages.push(todos) + return this.buildOutput() + + case 'error': + this.sections.messages.push( + `• Error: ${item.message}` + ) + return this.buildOutput() + + default: + return undefined + } + } + + private handleEvent( + event: + | ThreadEvent + | ResponseDeltaEvent + | ResponseOutputTextDeltaEvent + | ResponseCompletedEvent + | ResponseErrorEvent + | Record + ): string | undefined { + if (!event || typeof event !== 'object') return undefined + + const eventType = (event as any).type + + // Handle thread started + if (eventType === 'thread.started') { + this.sections.threadId = (event as any).thread_id + return undefined // Don't show thread ID in output + } + + // Handle turn completed (usage stats) + if (eventType === 'turn.completed') { + const usage = (event as any).usage + if (usage) { + this.sections.usage = usage + return this.buildOutput() + } + return undefined + } + + // Skip turn.started and non-critical errors + if (eventType === 'turn.started' || eventType === 'error') { + return undefined + } + + if ('item' in event && event.item) { + return this.handleItem(event.item as ThreadItem) + } + + if ('delta' in event) { + const deltaText = this.extractDeltaText((event as any).delta) + if (deltaText) { + // Handle streaming deltas if needed + return undefined + } + } + + if (event.type === 'response.completed') { + const text = this.extractResponseText((event as ResponseCompletedEvent).response) + if (text) { + this.sections.messages.push(text) + return this.buildOutput() + } + } + + if (event.type === 'response.error') { + const errorMessage = + (event as ResponseErrorEvent).error?.message ?? 'Codex encountered an error.' 
+ this.sections.messages.push(`❌ Error: ${errorMessage}`) + return this.buildOutput() + } + + return undefined + } + + private buildOutput(): string { + const parts: string[] = [] + + // Add reasoning sections (formatted as italic/thinking blocks) + if (this.sections.reasoning.length > 0) { + for (const reasoning of this.sections.reasoning) { + parts.push(`_${reasoning}_`) + } + } + + // Add messages + if (this.sections.messages.length > 0) { + parts.push(...this.sections.messages) + } + + // Add usage stats at the end if available + if (this.sections.usage) { + const { input_tokens, cached_input_tokens, output_tokens } = this.sections.usage + const total = input_tokens + output_tokens + parts.push( + `\n---\n**Tokens:** ${total.toLocaleString()} total (${input_tokens.toLocaleString()} in, ${output_tokens.toLocaleString()} out${cached_input_tokens > 0 ? `, ${cached_input_tokens.toLocaleString()} cached` : ''})` + ) + } + + return parts.join('\n\n') + } + + private normalize(raw: string): string | undefined { + const trimmed = raw.trim() + if (!trimmed) return undefined + + if (trimmed.startsWith('event:') || trimmed.startsWith('id:')) { + return undefined + } + + if (trimmed.startsWith('data:')) { + const data = trimmed.slice(5).trim() + if (!data || data === '[DONE]') return undefined + return data + } + + return trimmed + } + + private extractDeltaText(delta: unknown): string | undefined { + if (!delta) return undefined + + if (typeof delta === 'string') { + return delta + } + + if (Array.isArray(delta)) { + return delta + .map(part => this.extractDeltaText(part)) + .filter((chunk): chunk is string => Boolean(chunk)) + .join('') || undefined + } + + if (typeof delta === 'object') { + const obj = delta as Record + if (typeof obj.text === 'string') { + return obj.text + } + + if (Array.isArray(obj.content)) { + const text = obj.content + .map(entry => + typeof entry === 'object' && entry !== null && 'text' in entry && typeof entry.text === 'string' + ? 
entry.text + : '' + ) + .join('') + return text || undefined + } + + if (typeof obj.delta === 'string') { + return obj.delta + } + } + + return undefined + } + + private extractResponseText(response: unknown): string | undefined { + if (!response || typeof response !== 'object') return undefined + + const record = response as Record + + if (typeof record.text === 'string') { + return record.text + } + + if (Array.isArray(record.output)) { + const text = (record.output as Array>) + .map(part => (typeof part.text === 'string' ? part.text : '')) + .join('') + return text || undefined + } + + return undefined + } +} diff --git a/src/scripts/__tests__/codex-sdk-runner.test.ts b/src/scripts/__tests__/codex-sdk-runner.test.ts new file mode 100644 index 0000000..4ba613e --- /dev/null +++ b/src/scripts/__tests__/codex-sdk-runner.test.ts @@ -0,0 +1,90 @@ +import { describe, expect, it, vi, beforeEach } from 'vitest' + +const writes: Array = [] +const errors: Array = [] + +vi.mock('@openai/codex-sdk', () => { + const runStreamed = vi.fn(async () => ({ + events: (async function* () { + yield { type: 'item.started', item: { type: 'agent_message', text: '' } } + yield { + type: 'item.completed', + item: { type: 'agent_message', text: 'Hello from Codex' }, + } + })(), + })) + + const startThread = vi.fn(() => ({ runStreamed })) + + return { + Codex: vi.fn(() => ({ startThread })), + __runStreamed: runStreamed, + __startThread: startThread, + } +}) + +const codexModule: any = await import('@openai/codex-sdk') +const { Codex, __startThread, __runStreamed } = codexModule +const { runCodex } = await import('../../../scripts/codex-sdk-runner.mjs') + +describe('codex sdk runner', () => { + beforeEach(() => { + writes.length = 0 + errors.length = 0 + vi.clearAllMocks() + process.env.CODEX_SDK_DIST_PATH = '' + }) + + it('streams events to the provided writer', async () => { + await runCodex( + { + sessionId: 'sess-1', + prompt: 'hello', + workingDirectory: '/tmp/demo', + sandboxMode: 
'workspace-write', + model: 'o4' + }, + { + write: async (msg: string) => writes.push(JSON.parse(msg)), + writeError: async (msg: string) => errors.push(JSON.parse(msg)), + } + ) + + expect(Codex).toHaveBeenCalledWith({ workingDirectory: '/tmp/demo' }) + expect(__startThread).toHaveBeenCalledWith({ + model: 'o4', + sandboxMode: 'workspace-write', + workingDirectory: '/tmp/demo', + skipGitRepoCheck: true, + }) + + expect(__runStreamed).toHaveBeenCalledWith('hello') + expect(writes).toEqual([ + { sessionId: 'sess-1', content: JSON.stringify({ type: 'item.started', item: { type: 'agent_message', text: '' } }), finished: false }, + { sessionId: 'sess-1', content: JSON.stringify({ type: 'item.completed', item: { type: 'agent_message', text: 'Hello from Codex' } }), finished: false }, + { sessionId: 'sess-1', content: '', finished: true } + ]) + expect(errors).toEqual([]) + }) + + it('reports errors through writeError channel', async () => { + __runStreamed.mockImplementationOnce(async () => { + throw new Error('boom') + }) + + await runCodex( + { + sessionId: 'sess-2', + prompt: 'broken', + }, + { + write: async (msg: string) => writes.push(JSON.parse(msg)), + writeError: async (msg: string) => errors.push(JSON.parse(msg)), + } + ) + + expect(errors).toEqual([ + { sessionId: 'sess-2', error: 'boom', finished: true } + ]) + }) +})