diff --git a/Cargo.lock b/Cargo.lock index a4374b5..f39a806 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -112,6 +112,15 @@ version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af" +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + [[package]] name = "bstr" version = "1.12.1" @@ -229,6 +238,15 @@ dependencies = [ "url", ] +[[package]] +name = "cpufeatures" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" +dependencies = [ + "libc", +] + [[package]] name = "crc32fast" version = "1.5.0" @@ -238,6 +256,16 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "crypto-common" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a" +dependencies = [ + "generic-array", + "typenum", +] + [[package]] name = "deranged" version = "0.5.8" @@ -253,6 +281,16 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6184e33543162437515c2e2b48714794e37845ec9851711914eec9d308f6ebe8" +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "crypto-common", +] + [[package]] name = "displaydoc" version = "0.2.5" @@ -364,6 +402,16 @@ dependencies = [ "libc", ] +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" 
+dependencies = [ + "typenum", + "version_check", +] + [[package]] name = "getrandom" version = "0.2.17" @@ -1005,6 +1053,17 @@ dependencies = [ "zmij", ] +[[package]] +name = "sha2" +version = "0.10.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + [[package]] name = "shlex" version = "1.3.0" @@ -1025,7 +1084,7 @@ checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" [[package]] name = "specsync" -version = "3.2.0" +version = "3.3.0" dependencies = [ "assert_cmd", "clap", @@ -1036,6 +1095,7 @@ dependencies = [ "regex", "serde", "serde_json", + "sha2", "tempfile", "ureq", "walkdir", @@ -1141,6 +1201,12 @@ dependencies = [ "zerovec", ] +[[package]] +name = "typenum" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" + [[package]] name = "unicode-ident" version = "1.0.24" diff --git a/Cargo.toml b/Cargo.toml index 4cc201c..1622f4f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,14 +1,16 @@ [package] name = "specsync" -version = "3.2.0" +version = "3.3.0" edition = "2024" +rust-version = "1.85" description = "Bidirectional spec-to-code validation with schema column checking — 11 languages, single binary" license = "MIT" readme = "README.md" homepage = "https://github.com/CorvidLabs/spec-sync" repository = "https://github.com/CorvidLabs/spec-sync" -keywords = ["spec", "documentation", "validation", "coverage"] +keywords = ["spec", "documentation", "validation", "coverage", "code-quality"] categories = ["development-tools", "command-line-utilities"] +exclude = ["tests/", "specs/", ".github/", ".specsync/"] [[bin]] name = "specsync" @@ -23,6 +25,7 @@ walkdir = "2" colored = "3" notify = "7" notify-debouncer-full = "0.4" +sha2 = "0.10" ureq = { version = "3", features = ["json"] 
} [dev-dependencies] diff --git a/src/ai.rs b/src/ai.rs index f69df30..9c40056 100644 --- a/src/ai.rs +++ b/src/ai.rs @@ -10,6 +10,18 @@ const MAX_FILE_CHARS: usize = 30_000; const MAX_PROMPT_CHARS: usize = 150_000; const DEFAULT_AI_TIMEOUT_SECS: u64 = 120; +/// Truncate a string to at most `max_bytes` bytes on a valid UTF-8 char boundary. +fn safe_truncate(s: &str, max_bytes: usize) -> &str { + if s.len() <= max_bytes { + return s; + } + let mut end = max_bytes; + while end > 0 && !s.is_char_boundary(end) { + end -= 1; + } + &s[..end] +} + /// A resolved provider ready to execute — either a CLI command or a direct API call. #[derive(Debug, Clone)] pub enum ResolvedProvider { @@ -274,7 +286,7 @@ fn build_prompt( let truncated = if content.len() > MAX_FILE_CHARS { format!( "{}\n\n[... truncated at {MAX_FILE_CHARS} chars ...]", - &content[..MAX_FILE_CHARS] + safe_truncate(content, MAX_FILE_CHARS) ) } else { content.clone() @@ -679,11 +691,7 @@ fn build_regen_prompt( prompt.push_str(&format!("(Skipping {path} — size budget exceeded)\n\n")); continue; } - let truncated = if content.len() > 30_000 { - &content[..30_000] - } else { - content.as_str() - }; + let truncated = safe_truncate(content, 30_000); prompt.push_str(&format!("### `{path}`\n\n```\n{truncated}\n```\n\n")); total_len += truncated.len(); } diff --git a/src/archive.rs b/src/archive.rs new file mode 100644 index 0000000..050c19d --- /dev/null +++ b/src/archive.rs @@ -0,0 +1,218 @@ +use colored::Colorize; +use std::fs; +use std::path::Path; + +use crate::validator::find_spec_files; + +/// Result of archiving tasks in a single tasks.md file. +pub struct ArchiveResult { + pub tasks_path: String, + pub archived_count: usize, +} + +/// Archive completed tasks across all companion tasks.md files. +/// Moves `- [x]` items to an `## Archive` section at the bottom. 
+pub fn archive_tasks(root: &Path, specs_dir: &Path, dry_run: bool) -> Vec { + let spec_files = find_spec_files(specs_dir); + let mut results = Vec::new(); + + for spec_path in &spec_files { + // Find the companion tasks.md in the same directory + let spec_dir = match spec_path.parent() { + Some(d) => d, + None => continue, + }; + let tasks_path = spec_dir.join("tasks.md"); + if !tasks_path.exists() { + continue; + } + + let content = match fs::read_to_string(&tasks_path) { + Ok(c) => c, + Err(_) => continue, + }; + + let rel_path = tasks_path + .strip_prefix(root) + .unwrap_or(&tasks_path) + .to_string_lossy() + .to_string(); + + if let Some((new_content, count)) = archive_completed_tasks(&content) { + if count > 0 { + if !dry_run { + if let Err(e) = fs::write(&tasks_path, &new_content) { + eprintln!( + "{} Failed to write {}: {e}", + "error:".red().bold(), + rel_path + ); + continue; + } + } + results.push(ArchiveResult { + tasks_path: rel_path, + archived_count: count, + }); + } + } + } + + results +} + +/// Archive completed tasks in a tasks.md file. +/// Returns (new_content, archived_count) if any tasks were archived. 
+fn archive_completed_tasks(content: &str) -> Option<(String, usize)> { + let mut completed_tasks: Vec = Vec::new(); + let mut remaining_lines: Vec = Vec::new(); + let mut in_archive = false; + let mut existing_archive: Vec = Vec::new(); + + for line in content.lines() { + let trimmed = line.trim(); + + // Track if we're in the archive section + if trimmed == "## Archive" { + in_archive = true; + continue; + } + if in_archive { + if trimmed.starts_with("## ") { + // Exited archive section into next section + in_archive = false; + remaining_lines.push(line.to_string()); + } else { + existing_archive.push(line.to_string()); + } + continue; + } + + // Check for completed tasks outside the archive section + if trimmed.starts_with("- [x]") || trimmed.starts_with("- [X]") { + completed_tasks.push(line.to_string()); + } else { + remaining_lines.push(line.to_string()); + } + } + + if completed_tasks.is_empty() { + return None; + } + + let count = completed_tasks.len(); + + // Build new content: remaining lines + archive section + let mut new_content = remaining_lines.join("\n"); + + // Ensure trailing newline before archive section + if !new_content.ends_with('\n') { + new_content.push('\n'); + } + new_content.push('\n'); + new_content.push_str("## Archive\n\n"); + + // Add existing archive entries first + for line in &existing_archive { + if !line.trim().is_empty() { + new_content.push_str(line); + new_content.push('\n'); + } + } + + // Add newly archived tasks + for task in &completed_tasks { + new_content.push_str(task); + new_content.push('\n'); + } + + Some((new_content, count)) +} + +/// Count completed tasks across all tasks.md files (for warnings in check command). 
+pub fn count_completed_tasks(specs_dir: &Path) -> usize { + let spec_files = find_spec_files(specs_dir); + let mut total = 0; + + for spec_path in &spec_files { + let spec_dir = match spec_path.parent() { + Some(d) => d, + None => continue, + }; + let tasks_path = spec_dir.join("tasks.md"); + if !tasks_path.exists() { + continue; + } + if let Ok(content) = fs::read_to_string(&tasks_path) { + total += content + .lines() + .filter(|l| { + let t = l.trim(); + t.starts_with("- [x]") || t.starts_with("- [X]") + }) + .count(); + } + } + + total +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_archive_completed_tasks() { + let content = r#"--- +spec: test.spec.md +--- + +## Tasks + +- [ ] Uncompleted task +- [x] Done task 1 +- [ ] Another open task +- [x] Done task 2 + +## Gaps + +Nothing here. +"#; + + let (new_content, count) = archive_completed_tasks(content).unwrap(); + assert_eq!(count, 2); + assert!(new_content.contains("## Archive")); + assert!(new_content.contains("- [x] Done task 1")); + assert!(new_content.contains("- [x] Done task 2")); + assert!(new_content.contains("- [ ] Uncompleted task")); + // Archived tasks should not appear in the Tasks section + assert!(!new_content[..new_content.find("## Archive").unwrap()].contains("- [x]")); + } + + #[test] + fn test_archive_no_completed() { + let content = r#"## Tasks + +- [ ] Open task +"#; + + assert!(archive_completed_tasks(content).is_none()); + } + + #[test] + fn test_archive_preserves_existing() { + let content = r#"## Tasks + +- [x] New done task + +## Archive + +- [x] Previously archived +"#; + + let (new_content, count) = archive_completed_tasks(content).unwrap(); + assert_eq!(count, 1); + assert!(new_content.contains("- [x] Previously archived")); + assert!(new_content.contains("- [x] New done task")); + } +} diff --git a/src/compact.rs b/src/compact.rs new file mode 100644 index 0000000..396b917 --- /dev/null +++ b/src/compact.rs @@ -0,0 +1,247 @@ +use colored::Colorize; +use 
std::fs; +use std::path::Path; + +use crate::validator::find_spec_files; + +/// Result of compacting a single spec's changelog. +pub struct CompactResult { + pub spec_path: String, + pub original_entries: usize, + pub compacted_entries: usize, + pub removed: usize, +} + +/// Compact changelog entries across all specs. +/// Keeps the last `keep` entries and summarizes older ones. +pub fn compact_changelogs( + root: &Path, + specs_dir: &Path, + keep: usize, + dry_run: bool, +) -> Vec { + let spec_files = find_spec_files(specs_dir); + let mut results = Vec::new(); + + for spec_path in &spec_files { + let content = match fs::read_to_string(spec_path) { + Ok(c) => c, + Err(_) => continue, + }; + + let rel_path = spec_path + .strip_prefix(root) + .unwrap_or(spec_path) + .to_string_lossy() + .to_string(); + + if let Some((new_content, result)) = compact_spec_changelog(&content, &rel_path, keep) { + if result.removed > 0 { + if !dry_run { + if let Err(e) = fs::write(spec_path, &new_content) { + eprintln!( + "{} Failed to write {}: {e}", + "error:".red().bold(), + rel_path + ); + continue; + } + } + results.push(result); + } + } + } + + results +} + +/// Compact the changelog in a single spec file's content. +/// Returns (new_content, result) if the changelog was found. +fn compact_spec_changelog( + content: &str, + rel_path: &str, + keep: usize, +) -> Option<(String, CompactResult)> { + // Find the ## Change Log section + let changelog_marker = "## Change Log"; + let cl_start = content.find(changelog_marker)?; + + // Find where this section ends (next ## heading or EOF) + let after_header = cl_start + changelog_marker.len(); + let section_end = content[after_header..] + .find("\n## ") + .map(|p| after_header + p) + .unwrap_or(content.len()); + + let section = &content[cl_start..section_end]; + let lines: Vec<&str> = section.lines().collect(); + + // Find table rows: lines starting with | that are not header/separator. 
+ // The first two table lines are always header + separator; data rows follow. + let mut header_lines: Vec = Vec::new(); + let mut data_rows: Vec<(usize, &str)> = Vec::new(); + let mut table_line_count = 0usize; + + for (i, line) in lines.iter().enumerate() { + let trimmed = line.trim(); + if !trimmed.starts_with('|') { + continue; + } + table_line_count += 1; + // First two table lines are header row and separator row + if table_line_count <= 2 { + header_lines.push(i); + continue; + } + // Data row + data_rows.push((i, trimmed)); + } + + let total = data_rows.len(); + if total <= keep { + return Some(( + content.to_string(), + CompactResult { + spec_path: rel_path.to_string(), + original_entries: total, + compacted_entries: total, + removed: 0, + }, + )); + } + + // Keep the last `keep` entries, summarize the rest + let to_remove = total - keep; + let removed_rows = &data_rows[..to_remove]; + + // Extract date range from removed entries + let first_date = extract_first_cell(removed_rows.first().map(|(_, l)| *l).unwrap_or("")); + let last_date = extract_first_cell(removed_rows.last().map(|(_, l)| *l).unwrap_or("")); + + // Detect column count from first data row + let col_count = data_rows + .first() + .map(|(_, l)| l.matches('|').count().saturating_sub(1)) + .unwrap_or(2); + + let summary_row = if col_count >= 3 { + format!("| {first_date} — {last_date} | — | Compacted: {to_remove} entries |") + } else { + format!("| {first_date} — {last_date} | Compacted: {to_remove} entries |") + }; + + // Build the indices to remove + let remove_indices: std::collections::HashSet = + removed_rows.iter().map(|(i, _)| *i).collect(); + + // Reconstruct the section + let mut new_lines: Vec = Vec::new(); + let mut inserted_summary = false; + + for (i, line) in lines.iter().enumerate() { + if remove_indices.contains(&i) { + if !inserted_summary { + new_lines.push(summary_row.clone()); + inserted_summary = true; + } + // Skip this line (it was compacted) + } else { + 
new_lines.push(line.to_string()); + } + } + + let new_section = new_lines.join("\n"); + let mut new_content = String::new(); + new_content.push_str(&content[..cl_start]); + new_content.push_str(&new_section); + new_content.push_str(&content[section_end..]); + + Some(( + new_content, + CompactResult { + spec_path: rel_path.to_string(), + original_entries: total, + compacted_entries: keep + 1, // kept + summary + removed: to_remove, + }, + )) +} + +/// Extract the first cell value from a markdown table row. +fn extract_first_cell(row: &str) -> String { + let parts: Vec<&str> = row.split('|').collect(); + if parts.len() >= 2 { + parts[1].trim().to_string() + } else { + "?".to_string() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_compact_changelog() { + let content = r#"--- +module: test +version: 1 +status: active +files: + - src/test.rs +--- + +## Purpose + +Test module. + +## Change Log + +| Date | Change | +|------|--------| +| 2026-01-01 | First | +| 2026-01-15 | Second | +| 2026-02-01 | Third | +| 2026-02-15 | Fourth | +| 2026-03-01 | Fifth | +"#; + + let (new_content, result) = compact_spec_changelog(content, "test.spec.md", 3).unwrap(); + assert_eq!(result.original_entries, 5); + assert_eq!(result.removed, 2); + assert!(new_content.contains("Compacted: 2 entries")); + assert!(new_content.contains("| 2026-02-01 | Third |")); + assert!(new_content.contains("| 2026-03-01 | Fifth |")); + assert!(!new_content.contains("| 2026-01-01 | First |")); + } + + #[test] + fn test_compact_no_change_needed() { + let content = r#"## Change Log + +| Date | Change | +|------|--------| +| 2026-03-01 | Only entry | +"#; + + let (_, result) = compact_spec_changelog(content, "test.spec.md", 5).unwrap(); + assert_eq!(result.removed, 0); + } + + #[test] + fn test_compact_three_column_table() { + let content = r#"## Change Log + +| Date | Author | Change | +|------|--------|--------| +| 2026-01-01 | alice | First | +| 2026-02-01 | bob | Second | +| 
2026-03-01 | carol | Third | +"#; + + let (new_content, result) = compact_spec_changelog(content, "test.spec.md", 1).unwrap(); + assert_eq!(result.removed, 2); + assert!(new_content.contains("| — |")); // author placeholder + assert!(new_content.contains("Compacted: 2 entries")); + } +} diff --git a/src/config.rs b/src/config.rs index 6eb4f93..f378dea 100644 --- a/src/config.rs +++ b/src/config.rs @@ -183,6 +183,8 @@ const KNOWN_JSON_KEYS: &[&str] = &[ "aiApiKey", "aiBaseUrl", "aiTimeout", + "rules", + "taskArchiveDays", ]; fn load_json_config(config_path: &Path, root: &Path) -> SpecSyncConfig { @@ -240,10 +242,17 @@ fn load_toml_config(config_path: &Path, root: &Path) -> SpecSyncConfig { let mut config = SpecSyncConfig::default(); let mut has_source_dirs = false; + let mut current_section: Option = None; for line in content.lines() { let line = line.trim(); - if line.is_empty() || line.starts_with('#') || line.starts_with('[') { + if line.is_empty() || line.starts_with('#') { + continue; + } + + // Track TOML section headers like [rules] + if line.starts_with('[') && line.ends_with(']') { + current_section = Some(line[1..line.len() - 1].trim().to_string()); continue; } @@ -251,6 +260,20 @@ fn load_toml_config(config_path: &Path, root: &Path) -> SpecSyncConfig { let key = line[..eq_pos].trim(); let value = line[eq_pos + 1..].trim(); + // Route to section-specific parsing + if let Some(ref section) = current_section { + match section.as_str() { + "rules" => { + parse_toml_rules_key(key, value, &mut config.rules); + continue; + } + _ => { + // Unknown section — skip silently + continue; + } + } + } + match key { "specs_dir" => config.specs_dir = parse_toml_string(value), "source_dirs" => { @@ -292,6 +315,11 @@ fn load_toml_config(config_path: &Path, root: &Path) -> SpecSyncConfig { "required_sections" => { config.required_sections = parse_toml_string_array(value); } + "task_archive_days" => { + if let Ok(n) = value.trim().parse::() { + config.task_archive_days = 
Some(n); + } + } _ => { eprintln!("Warning: unknown key \"{key}\" in .specsync.toml (ignored)"); } @@ -330,6 +358,41 @@ fn parse_toml_string_array(s: &str) -> Vec { .collect() } +/// Parse a key=value pair inside a `[rules]` TOML section. +fn parse_toml_rules_key(key: &str, value: &str, rules: &mut crate::types::ValidationRules) { + match key { + "max_changelog_entries" => { + if let Ok(n) = value.trim().parse::() { + rules.max_changelog_entries = Some(n); + } + } + "require_behavioral_examples" => { + rules.require_behavioral_examples = Some(parse_toml_bool(value)); + } + "min_invariants" => { + if let Ok(n) = value.trim().parse::() { + rules.min_invariants = Some(n); + } + } + "max_spec_size_kb" => { + if let Ok(n) = value.trim().parse::() { + rules.max_spec_size_kb = Some(n); + } + } + "require_depends_on" => { + rules.require_depends_on = Some(parse_toml_bool(value)); + } + _ => { + eprintln!("Warning: unknown rule \"{key}\" in [rules] section (ignored)"); + } + } +} + +/// Parse a TOML boolean value. +fn parse_toml_bool(s: &str) -> bool { + matches!(s.trim(), "true" | "yes" | "1") +} + /// Default schema pattern for SQL table extraction. 
pub fn default_schema_pattern() -> &'static str { r"CREATE (?:VIRTUAL )?TABLE(?:\s+IF NOT EXISTS)?\s+(\w+)" diff --git a/src/hash_cache.rs b/src/hash_cache.rs index dc032ec..88b8f4a 100644 --- a/src/hash_cache.rs +++ b/src/hash_cache.rs @@ -1,4 +1,5 @@ use serde::{Deserialize, Serialize}; +use sha2::{Digest, Sha256}; use std::collections::HashMap; use std::fmt; use std::fs; @@ -39,7 +40,8 @@ impl HashCache { let dir = root.join(CACHE_DIR); fs::create_dir_all(&dir)?; let path = dir.join(CACHE_FILE); - let json = serde_json::to_string_pretty(self)?; + let json = serde_json::to_string_pretty(self) + .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?; fs::write(path, json) } @@ -57,7 +59,7 @@ impl HashCache { } hasher.update(&buf[..n]); } - Some(hasher.hex_digest()) + Some(format!("{:x}", hasher.finalize())) } /// Check whether a file has changed since the last cached hash. @@ -315,145 +317,11 @@ pub fn extract_frontmatter_files(content: &str) -> Vec { files } -// ---------- Minimal SHA-256 implementation ---------- -// Using a small inline implementation to avoid adding a dependency. -// This is the standard FIPS 180-4 algorithm. 
- -struct Sha256 { - state: [u32; 8], - buf: Vec, - len: u64, -} - -const K: [u32; 64] = [ - 0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5, - 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174, - 0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da, - 0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967, - 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85, - 0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3, 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070, - 0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3, - 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2, -]; - -impl Sha256 { - fn new() -> Self { - Self { - state: [ - 0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, - 0x5be0cd19, - ], - buf: Vec::new(), - len: 0, - } - } - - fn update(&mut self, data: &[u8]) { - self.len += data.len() as u64; - self.buf.extend_from_slice(data); - while self.buf.len() >= 64 { - let block: [u8; 64] = self.buf[..64].try_into().unwrap(); - self.compress(&block); - self.buf.drain(..64); - } - } - - fn compress(&mut self, block: &[u8; 64]) { - let mut w = [0u32; 64]; - for i in 0..16 { - w[i] = u32::from_be_bytes(block[i * 4..i * 4 + 4].try_into().unwrap()); - } - for i in 16..64 { - let s0 = w[i - 15].rotate_right(7) ^ w[i - 15].rotate_right(18) ^ (w[i - 15] >> 3); - let s1 = w[i - 2].rotate_right(17) ^ w[i - 2].rotate_right(19) ^ (w[i - 2] >> 10); - w[i] = w[i - 16] - .wrapping_add(s0) - .wrapping_add(w[i - 7]) - .wrapping_add(s1); - } - - let [mut a, mut b, mut c, mut d, mut e, mut f, mut g, mut h] = self.state; - - for i in 0..64 { - let s1 = e.rotate_right(6) ^ e.rotate_right(11) ^ e.rotate_right(25); - let ch = (e & f) ^ (!e & g); - let 
temp1 = h - .wrapping_add(s1) - .wrapping_add(ch) - .wrapping_add(K[i]) - .wrapping_add(w[i]); - let s0 = a.rotate_right(2) ^ a.rotate_right(13) ^ a.rotate_right(22); - let maj = (a & b) ^ (a & c) ^ (b & c); - let temp2 = s0.wrapping_add(maj); - - h = g; - g = f; - f = e; - e = d.wrapping_add(temp1); - d = c; - c = b; - b = a; - a = temp1.wrapping_add(temp2); - } - - self.state[0] = self.state[0].wrapping_add(a); - self.state[1] = self.state[1].wrapping_add(b); - self.state[2] = self.state[2].wrapping_add(c); - self.state[3] = self.state[3].wrapping_add(d); - self.state[4] = self.state[4].wrapping_add(e); - self.state[5] = self.state[5].wrapping_add(f); - self.state[6] = self.state[6].wrapping_add(g); - self.state[7] = self.state[7].wrapping_add(h); - } - - fn hex_digest(mut self) -> String { - // Padding - let bit_len = self.len * 8; - self.buf.push(0x80); - while self.buf.len() % 64 != 56 { - self.buf.push(0); - } - self.buf.extend_from_slice(&bit_len.to_be_bytes()); - - // Process remaining blocks - while self.buf.len() >= 64 { - let block: [u8; 64] = self.buf[..64].try_into().unwrap(); - self.compress(&block); - self.buf.drain(..64); - } - - self.state - .iter() - .map(|word| format!("{word:08x}")) - .collect() - } -} - #[cfg(test)] mod tests { use super::*; use std::fs; - #[test] - fn sha256_empty() { - let mut h = Sha256::new(); - h.update(b""); - assert_eq!( - h.hex_digest(), - "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" - ); - } - - #[test] - fn sha256_hello() { - let mut h = Sha256::new(); - h.update(b"hello"); - assert_eq!( - h.hex_digest(), - "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824" - ); - } - #[test] fn cache_round_trip() { let dir = tempfile::tempdir().unwrap(); diff --git a/src/main.rs b/src/main.rs index 3584dd6..502bd63 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,4 +1,6 @@ mod ai; +mod archive; +mod compact; mod config; mod exports; mod generator; @@ -12,6 +14,7 @@ mod schema; mod scoring; mod 
types; mod validator; +mod view; mod watch; use clap::{Parser, Subcommand}; @@ -117,6 +120,30 @@ enum Command { #[command(subcommand)] action: HooksAction, }, + /// Compact changelog entries in spec files to prevent unbounded growth + Compact { + /// Keep the last N changelog entries (default: 10) + #[arg(long, default_value = "10")] + keep: usize, + /// Show what would be compacted without writing files + #[arg(long)] + dry_run: bool, + }, + /// Archive completed tasks from companion tasks.md files + ArchiveTasks { + /// Show what would be archived without writing files + #[arg(long)] + dry_run: bool, + }, + /// View a spec filtered by role (dev, qa, product, agent) + View { + /// Role to filter by: dev, qa, product, agent + #[arg(long)] + role: String, + /// Specific spec module to view (shows all if omitted) + #[arg(long)] + spec: Option, + }, } #[derive(Subcommand)] @@ -224,6 +251,9 @@ fn run() { Command::Resolve { remote } => cmd_resolve(&root, remote), Command::Diff { base } => cmd_diff(&root, &base, format), Command::Hooks { action } => cmd_hooks(&root, action), + Command::Compact { keep, dry_run } => cmd_compact(&root, keep, dry_run), + Command::ArchiveTasks { dry_run } => cmd_archive_tasks(&root, dry_run), + Command::View { role, spec } => cmd_view(&root, &role, spec.as_deref()), } } @@ -288,6 +318,123 @@ fn collect_hook_targets( targets } +fn cmd_compact(root: &Path, keep: usize, dry_run: bool) { + let config = load_config(root); + let specs_dir = root.join(&config.specs_dir); + + if dry_run { + println!("{} Dry run — no files will be modified\n", "ℹ".cyan()); + } + + let results = compact::compact_changelogs(root, &specs_dir, keep, dry_run); + + if results.is_empty() { + println!( + "{}", + "No changelogs need compaction (all within limit).".green() + ); + return; + } + + for r in &results { + let verb = if dry_run { + "would compact" + } else { + "compacted" + }; + println!( + " {} {} — {verb} {} entries (kept {})", + "✓".green(), + r.spec_path, + 
r.removed, + r.compacted_entries, + ); + } + + let total_removed: usize = results.iter().map(|r| r.removed).sum(); + println!( + "\n{} {} entries across {} spec(s)", + if dry_run { + "Would compact".to_string() + } else { + "Compacted".to_string() + }, + total_removed, + results.len() + ); +} + +fn cmd_archive_tasks(root: &Path, dry_run: bool) { + let config = load_config(root); + let specs_dir = root.join(&config.specs_dir); + + if dry_run { + println!("{} Dry run — no files will be modified\n", "ℹ".cyan()); + } + + let results = archive::archive_tasks(root, &specs_dir, dry_run); + + if results.is_empty() { + println!("{}", "No completed tasks to archive.".green()); + return; + } + + for r in &results { + let verb = if dry_run { "would archive" } else { "archived" }; + println!( + " {} {} — {verb} {} task(s)", + "✓".green(), + r.tasks_path, + r.archived_count, + ); + } + + let total: usize = results.iter().map(|r| r.archived_count).sum(); + println!( + "\n{} {} task(s) across {} file(s)", + if dry_run { + "Would archive".to_string() + } else { + "Archived".to_string() + }, + total, + results.len() + ); +} + +fn cmd_view(root: &Path, role: &str, spec_filter: Option<&str>) { + let config = load_config(root); + let specs_dir = root.join(&config.specs_dir); + let spec_files = find_spec_files(&specs_dir); + + if spec_files.is_empty() { + eprintln!("No spec files found in {}/", config.specs_dir); + process::exit(1); + } + + for spec_path in &spec_files { + // If a specific spec was requested, filter by module name + if let Some(filter) = spec_filter { + let name = spec_path.file_stem().and_then(|s| s.to_str()).unwrap_or(""); + // Strip .spec suffix if present + let module_name = name.strip_suffix(".spec").unwrap_or(name); + if module_name != filter { + continue; + } + } + + match view::view_spec(spec_path, role) { + Ok(output) => { + println!("{output}"); + println!("---\n"); + } + Err(e) => { + eprintln!("{} {e}", "error:".red().bold()); + } + } + } +} + fn 
cmd_init(root: &Path) { let config_path = root.join("specsync.json"); let toml_path = root.join(".specsync.toml"); @@ -452,8 +599,16 @@ fn cmd_check( let _ = std::io::stderr().flush(); let mut answer = String::new(); let _ = std::io::stdin().read_line(&mut answer); - if !answer.trim().eq_ignore_ascii_case("y") { - // User declined — just continue with normal validation + if answer.trim().eq_ignore_ascii_case("y") { + let regen_count = + auto_regen_stale_specs(root, &requirements_stale_specs, &config, format); + if regen_count > 0 { + println!( + "{} Re-generated {regen_count} spec(s) from updated requirements\n", + "✓".green() + ); + } + } else { println!(" Skipping re-validation. Use --fix to auto-regenerate.\n"); } } @@ -586,49 +741,26 @@ fn auto_regen_stale_specs( .to_string_lossy() .to_string(); - // Find the requirements file + // Find the requirements file (current convention, then legacy) let parent = match spec_path.parent() { Some(p) => p, None => continue, }; + let stem = spec_path.file_stem().and_then(|s| s.to_str()).unwrap_or(""); + let module_name = stem.strip_suffix(".spec").unwrap_or(stem); + let req_path = parent.join("requirements.md"); - if !req_path.exists() { - // Try legacy name - let stem = spec_path.file_stem().and_then(|s| s.to_str()).unwrap_or(""); - let module = stem.strip_suffix(".spec").unwrap_or(stem); - let legacy = parent.join(format!("{module}.req.md")); - if !legacy.exists() { + let req_path = if req_path.exists() { + req_path + } else { + let legacy = parent.join(format!("{module_name}.req.md")); + if legacy.exists() { + legacy + } else { continue; } - // Use legacy path - let module_name = module; - if matches!(format, types::OutputFormat::Text) { - println!(" {} Regenerating {spec_rel}...", "⟳".cyan()); - } - match ai::regenerate_spec_with_ai( - module_name, - spec_path, - &legacy, - root, - config, - &provider, - ) { - Ok(new_spec) => { - if fs::write(spec_path, &new_spec).is_ok() { - regen_count += 1; - } - } - Err(e) => { - if 
matches!(format, types::OutputFormat::Text) { - eprintln!(" {} Failed to regenerate {spec_rel}: {e}", "✗".red()); - } - } - } - continue; - } + }; - let stem = spec_path.file_stem().and_then(|s| s.to_str()).unwrap_or(""); - let module_name = stem.strip_suffix(".spec").unwrap_or(stem); if matches!(format, types::OutputFormat::Text) { println!(" {} Regenerating {spec_rel}...", "⟳".cyan()); } diff --git a/src/mcp.rs b/src/mcp.rs index 6b4110c..11fffcf 100644 --- a/src/mcp.rs +++ b/src/mcp.rs @@ -331,6 +331,14 @@ fn tool_check(root: &Path, arguments: &Value) -> Result { } } + // Add staleness warnings into the warnings array for consistency + for entry in &stale_entries { + if let Some(msg) = entry["message"].as_str() { + let spec = entry["spec"].as_str().unwrap_or("unknown"); + all_warnings.push(json!(format!("{spec}: {msg}"))); + } + } + let coverage = compute_coverage(root, &spec_files, &config); let staleness_warnings = stale_entries.len(); let effective_warnings = total_warnings + staleness_warnings; diff --git a/src/parser.rs b/src/parser.rs index a73d37a..e2533c7 100644 --- a/src/parser.rs +++ b/src/parser.rs @@ -82,6 +82,7 @@ fn set_scalar(fm: &mut Frontmatter, key: &str, value: &str) { "module" => fm.module = Some(value.to_string()), "version" => fm.version = Some(value.to_string()), "status" => fm.status = Some(value.to_string()), + "agent_policy" => fm.agent_policy = Some(value.to_string()), _ => {} } } diff --git a/src/types.rs b/src/types.rs index 5d73ba3..d231ac2 100644 --- a/src/types.rs +++ b/src/types.rs @@ -119,6 +119,37 @@ pub enum OutputFormat { Markdown, } +/// Valid spec lifecycle statuses. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum SpecStatus { + Draft, + Active, + Stable, + Deprecated, +} + +impl SpecStatus { + /// Parse a status string (case-insensitive). 
+ pub fn from_str_loose(s: &str) -> Option<Self> { + match s.to_lowercase().as_str() { + "draft" => Some(Self::Draft), + "active" => Some(Self::Active), + "stable" => Some(Self::Stable), + "deprecated" => Some(Self::Deprecated), + _ => None, + } + } + + pub fn as_str(&self) -> &'static str { + match self { + Self::Draft => "draft", + Self::Active => "active", + Self::Stable => "stable", + Self::Deprecated => "deprecated", + } + } +} + /// YAML frontmatter parsed from a spec file. #[derive(Debug, Default, Clone)] pub struct Frontmatter { @@ -128,6 +159,14 @@ pub struct Frontmatter { pub files: Vec<String>, pub db_tables: Vec<String>, pub depends_on: Vec<String>, + pub agent_policy: Option<String>, +} + +impl Frontmatter { + /// Parse the status field into a typed enum. + pub fn parsed_status(&self) -> Option<SpecStatus> { + self.status.as_deref().and_then(SpecStatus::from_str_loose) + } } /// Result of validating a single spec. @@ -244,6 +283,35 @@ pub struct SpecSyncConfig { /// Timeout in seconds for each AI command invocation (default: 120). #[serde(default)] pub ai_timeout: Option, + + /// Custom validation rules for project-specific lint checks. + #[serde(default)] + pub rules: ValidationRules, + + /// Auto-archive completed tasks older than this many days. + #[serde(default)] + pub task_archive_days: Option, +} + +/// Custom validation rules configurable per-project. +#[derive(Debug, Default, Clone, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ValidationRules { + /// Warn if a spec's Change Log has more entries than this. + #[serde(default)] + pub max_changelog_entries: Option<usize>, + /// Require at least one Behavioral Example scenario. + #[serde(default)] + pub require_behavioral_examples: Option<bool>, + /// Minimum number of invariants required. + #[serde(default)] + pub min_invariants: Option<usize>, + /// Warn if spec file exceeds this size in KB. + #[serde(default)] + pub max_spec_size_kb: Option<usize>, + /// Require non-empty depends_on in frontmatter. 
+ #[serde(default)] + pub require_depends_on: Option<bool>, } /// A user-defined module grouping in specsync.json. @@ -409,6 +477,8 @@ impl Default for SpecSyncConfig { ai_api_key: None, ai_base_url: None, ai_timeout: None, + rules: ValidationRules::default(), + task_archive_days: None, } } } diff --git a/src/validator.rs b/src/validator.rs index 992370d..b2d3c38 100644 --- a/src/validator.rs +++ b/src/validator.rs @@ -7,8 +7,16 @@ use regex::Regex; use std::collections::{HashMap, HashSet}; use std::fs; use std::path::{Path, PathBuf}; +use std::sync::LazyLock; use walkdir::WalkDir; +static CONSUMED_BY_RE: LazyLock<Regex> = + LazyLock::new(|| Regex::new(r"(?s)### Consumed By\s*\n(.*?)(?:\n## |\n### |$)").unwrap()); +static FILE_REF_RE: LazyLock<Regex> = + LazyLock::new(|| Regex::new(r"\|\s*`([^`]+\.\w+)`\s*\|").unwrap()); +static NUMBERED_RE: LazyLock<Regex> = + LazyLock::new(|| Regex::new(r"(?m)^\d+\.\s+\S").unwrap()); + /// Check if a dependency reference is a cross-project reference. /// Cross-project refs use the format `owner/repo@module` (e.g. `corvid-labs/algochat@auth`). 
pub fn is_cross_project_ref(dep: &str) -> bool { @@ -190,9 +198,37 @@ pub fn validate_spec( result .errors .push("Frontmatter missing required field: status".to_string()); + result.fixes.push( + "Add `status: active` (or draft/stable/deprecated) to the frontmatter".to_string(), + ); + } else if let Some(status_str) = &fm.status { + if fm.parsed_status().is_none() { + result.warnings.push(format!( + "Unknown status '{}' — expected one of: draft, active, stable, deprecated", + status_str + )); + } + } + + // Status lifecycle: deprecated specs emit a warning + let spec_status = fm.parsed_status(); + if spec_status == Some(crate::types::SpecStatus::Deprecated) { result - .fixes - .push("Add `status: active` (or draft/deprecated) to the frontmatter".to_string()); + .warnings + .push("Spec is deprecated — consider removing or archiving".to_string()); + } + + // Validate agent_policy if present + if let Some(policy) = &fm.agent_policy { + match policy.as_str() { + "read-only" | "suggest-only" | "full-access" => {} + _ => { + result.warnings.push(format!( + "Unknown agent_policy '{}' — expected: read-only, suggest-only, or full-access", + policy + )); + } + } } if fm.files.is_empty() { result.errors.push( @@ -291,9 +327,13 @@ pub fn validate_spec( } } - // Required markdown sections + // Required markdown sections (drafts skip "Public API" requirement) + let is_draft = spec_status == Some(crate::types::SpecStatus::Draft); let missing = get_missing_sections(body, &config.required_sections); for section in &missing { + if is_draft && section == "Public API" { + continue; // drafts can skip Public API + } result .errors .push(format!("Missing required section: ## {section}")); @@ -303,8 +343,9 @@ pub fn validate_spec( } // ─── Level 2: API Surface ───────────────────────────────────────── + // Draft specs skip API surface validation — exports may not exist yet. 
- if !fm.files.is_empty() { + if !fm.files.is_empty() && !is_draft { let mut all_exports: Vec = Vec::new(); for file in &fm.files { let full_path = root.join(file); @@ -372,11 +413,9 @@ pub fn validate_spec( } // Check Consumed By section references - let consumed_re = Regex::new(r"(?s)### Consumed By\s*\n(.*?)(?:\n## |\n### |$)").unwrap(); - if let Some(caps) = consumed_re.captures(body) { + if let Some(caps) = CONSUMED_BY_RE.captures(body) { let section = caps.get(1).unwrap().as_str(); - let file_ref_re = Regex::new(r"\|\s*`([^`]+\.\w+)`\s*\|").unwrap(); - for caps in file_ref_re.captures_iter(section) { + for caps in FILE_REF_RE.captures_iter(section) { if let Some(file_ref) = caps.get(1) { let file_path = root.join(file_ref.as_str()); if !file_path.exists() { @@ -389,9 +428,120 @@ pub fn validate_spec( } } + // ─── Custom Validation Rules ───────────────────────────────────── + apply_custom_rules(spec_path, body, &fm.depends_on, config, &mut result); + result } +/// Apply project-specific custom validation rules from config. 
+fn apply_custom_rules( + spec_path: &Path, + body: &str, + depends_on: &[String], + config: &SpecSyncConfig, + result: &mut ValidationResult, +) { + let rules = &config.rules; + + // max_spec_size_kb: warn if spec file is too large + if let Some(max_kb) = rules.max_spec_size_kb { + if let Ok(meta) = fs::metadata(spec_path) { + let size_kb = meta.len() as usize / 1024; + if size_kb > max_kb { + result.warnings.push(format!( + "Spec file is {size_kb} KB — exceeds limit of {max_kb} KB" + )); + } + } + } + + // max_changelog_entries: warn if Change Log has too many rows + if let Some(max_entries) = rules.max_changelog_entries { + let count = count_changelog_entries(body); + if count > max_entries { + result.warnings.push(format!( + "Change Log has {count} entries — exceeds limit of {max_entries} (run `specsync compact`)" + )); + } + } + + // require_behavioral_examples: require at least one ### Scenario + if rules.require_behavioral_examples == Some(true) { + let scenario_count = body.matches("### Scenario").count(); + if scenario_count == 0 { + result.errors.push( + "No behavioral examples found (rule: require_behavioral_examples)".to_string(), + ); + result.fixes.push( + "Add at least one `### Scenario:` under `## Behavioral Examples`".to_string(), + ); + } + } + + // min_invariants: require a minimum number of numbered invariants + if let Some(min) = rules.min_invariants { + let count = count_invariants(body); + if count < min { + result.warnings.push(format!( + "Only {count} invariant(s) found — minimum is {min}" + )); + } + } + + // require_depends_on: require non-empty depends_on in frontmatter + if rules.require_depends_on == Some(true) && depends_on.is_empty() { + result + .warnings + .push("No consumed dependencies documented (rule: require_depends_on)".to_string()); + } +} + +/// Count data rows in the Change Log table (excluding header and separator). 
+fn count_changelog_entries(body: &str) -> usize { + let changelog_start = match body.find("## Change Log") { + Some(pos) => pos, + None => return 0, + }; + let section = &body[changelog_start..]; + // Find next ## heading to bound the section + let section_end = section[1..] + .find("\n## ") + .map(|p| p + 1) + .unwrap_or(section.len()); + let section = &section[..section_end]; + + // Count data rows: skip the first two table lines (header + separator) + let mut table_line_count = 0usize; + section + .lines() + .filter(|line| { + let trimmed = line.trim(); + if !trimmed.starts_with('|') { + return false; + } + table_line_count += 1; + table_line_count > 2 + }) + .count() +} + +/// Count numbered invariants in the Invariants section. +fn count_invariants(body: &str) -> usize { + let inv_start = match body.find("## Invariants") { + Some(pos) => pos, + None => return 0, + }; + let section = &body[inv_start..]; + let section_end = section[1..] + .find("\n## ") + .map(|p| p + 1) + .unwrap_or(section.len()); + let section = &section[..section_end]; + + NUMBERED_RE.find_iter(section).count() +} + /// Suggest a similar file path when a referenced file doesn't exist. fn suggest_similar_file(root: &Path, missing_file: &str) -> Option { let missing_name = Path::new(missing_file).file_name()?.to_str()?; diff --git a/src/view.rs b/src/view.rs new file mode 100644 index 0000000..bc80e52 --- /dev/null +++ b/src/view.rs @@ -0,0 +1,173 @@ +use std::fs; +use std::path::Path; + +use crate::parser::parse_frontmatter; + +/// Sections visible to each role. 
+fn sections_for_role(role: &str) -> Option<Vec<&'static str>> { + match role { + "dev" => Some(vec![ + "Purpose", + "Public API", + "Invariants", + "Dependencies", + "Change Log", + ]), + "qa" => Some(vec!["Behavioral Examples", "Error Cases", "Invariants"]), + "product" => Some(vec!["Purpose", "Change Log"]), + "agent" => Some(vec![ + "Purpose", + "Public API", + "Invariants", + "Behavioral Examples", + "Error Cases", + ]), + _ => None, + } +} + +/// All supported role names. +pub fn valid_roles() -> &'static [&'static str] { + &["dev", "qa", "product", "agent"] +} + +/// Filter a spec file to show only sections relevant to a given role. +/// Returns the filtered markdown content. +pub fn view_spec(spec_path: &Path, role: &str) -> Result<String, String> { + let allowed = sections_for_role(role).ok_or_else(|| { + format!( + "Unknown role '{}' — valid roles: {}", + role, + valid_roles().join(", ") + ) + })?; + + let content = fs::read_to_string(spec_path) + .map_err(|e| format!("Cannot read {}: {e}", spec_path.display()))?; + + let parsed = + parse_frontmatter(&content).ok_or_else(|| "Cannot parse frontmatter".to_string())?; + + let fm = &parsed.frontmatter; + let body = &parsed.body; + + let mut output = String::new(); + + // Header with module name and role context + if let Some(module) = &fm.module { + output.push_str(&format!("# {} (view: {role})\n\n", module)); + } + + // Show status and agent_policy for agent role + if role == "agent" { + if let Some(status) = &fm.status { + output.push_str(&format!("**Status:** {status}\n")); + } + if let Some(policy) = &fm.agent_policy { + output.push_str(&format!("**Agent Policy:** {policy}\n")); + } else { + output.push_str("**Agent Policy:** not set (default: full-access)\n"); + } + output.push('\n'); + } + + // For product role, also show requirements companion if it exists + if role == "product" { + if let Some(parent) = spec_path.parent() { + let req_path = parent.join("requirements.md"); + if req_path.exists() { + if let Ok(req_content) = 
fs::read_to_string(&req_path) { + // Strip frontmatter from requirements.md + let req_body = strip_frontmatter(&req_content); + if !req_body.trim().is_empty() { + output.push_str("## Requirements\n\n"); + output.push_str(req_body.trim()); + output.push_str("\n\n"); + } + } + } + } + } + + // Split body into sections and filter + let sections = split_sections(body); + for (heading, content) in &sections { + if allowed.iter().any(|a| heading.contains(a)) { + output.push_str(&format!("## {heading}\n")); + output.push_str(content); + output.push('\n'); + } + } + + Ok(output) +} + +/// Split markdown body into (heading_text, section_content) pairs. +/// Only splits on `## ` level headings. +fn split_sections(body: &str) -> Vec<(String, String)> { + let mut sections = Vec::new(); + let mut current_heading: Option<String> = None; + let mut current_content = String::new(); + + for line in body.lines() { + if let Some(heading) = line.strip_prefix("## ") { + // Flush previous section + if let Some(h) = current_heading.take() { + sections.push((h, current_content.clone())); + current_content.clear(); + } + current_heading = Some(heading.trim().to_string()); + } else if current_heading.is_some() { + current_content.push_str(line); + current_content.push('\n'); + } + } + + // Flush last section + if let Some(h) = current_heading { + sections.push((h, current_content)); + } + + sections +} + +/// Strip YAML frontmatter from a markdown file. 
+fn strip_frontmatter(content: &str) -> &str { + if content.starts_with("---\n") { + if let Some(end) = content[4..].find("\n---\n") { + return &content[end + 8..]; // skip past closing ---\n + } + } + content +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_sections_for_role() { + assert!(sections_for_role("dev").unwrap().contains(&"Public API")); + assert!(sections_for_role("qa").unwrap().contains(&"Error Cases")); + assert!(sections_for_role("product").unwrap().contains(&"Purpose")); + assert!(sections_for_role("agent").unwrap().contains(&"Invariants")); + assert!(sections_for_role("unknown").is_none()); + } + + #[test] + fn test_split_sections() { + let body = "## Purpose\n\nDoes things.\n\n## Public API\n\n| Fn | Desc |\n\n## Change Log\n\n| Date | Change |\n"; + let sections = split_sections(body); + assert_eq!(sections.len(), 3); + assert_eq!(sections[0].0, "Purpose"); + assert_eq!(sections[1].0, "Public API"); + assert_eq!(sections[2].0, "Change Log"); + } + + #[test] + fn test_strip_frontmatter() { + let content = "---\nmodule: test\n---\n\n## Purpose\n"; + let result = strip_frontmatter(content); + assert!(result.contains("## Purpose")); + } +} diff --git a/src/watch.rs b/src/watch.rs index 42cde40..a1f8573 100644 --- a/src/watch.rs +++ b/src/watch.rs @@ -9,6 +9,7 @@ use notify_debouncer_full::{DebouncedEvent, new_debouncer}; use crate::config::load_config; /// Run the check command in watch mode, re-running on file changes. +/// Uses the hash cache to skip unchanged specs on subsequent runs. 
pub fn run_watch(root: &Path, strict: bool, require_coverage: Option) { let config = load_config(root); let specs_dir = root.join(&config.specs_dir); @@ -35,9 +36,9 @@ pub fn run_watch(root: &Path, strict: bool, require_coverage: Option) { std::process::exit(1); } - // Initial run + // Initial run with --force to validate everything print_separator(None); - run_check(root, strict, require_coverage); + run_check(root, strict, require_coverage, true); // Set up debounced file watcher let (tx, rx) = mpsc::channel(); @@ -76,6 +77,17 @@ pub fn run_watch(root: &Path, strict: bool, require_coverage: Option) { .collect::>() .join(", ") ); + if strict { + println!( + "{} Strict mode active — all specs will be re-validated on each run", + ">>>".cyan() + ); + } else { + println!( + "{} Hash cache active — only changed specs will be re-validated", + ">>>".cyan() + ); + } println!("{} Press Ctrl+C to stop\n", ">>>".cyan()); // Event loop @@ -101,7 +113,8 @@ pub fn run_watch(root: &Path, strict: bool, require_coverage: Option) { while rx.try_recv().is_ok() {} print_separator(changed_file.as_deref()); - run_check(root, strict, require_coverage); + // Subsequent runs use hash cache (no --force), only re-validating changed specs + run_check(root, strict, require_coverage, false); last_run = Instant::now(); println!( @@ -129,7 +142,7 @@ fn print_separator(changed_file: Option<&str>) { if let Some(file) = changed_file { println!("{} Changed: {}", ">>>".cyan(), file.bold()); } else { - println!("{} Initial run", ">>>".cyan()); + println!("{} Initial run (full validation)", ">>>".cyan()); } println!( "{}", @@ -137,26 +150,39 @@ fn print_separator(changed_file: Option<&str>) { ); } -fn run_check(root: &Path, strict: bool, require_coverage: Option) { +fn run_check(root: &Path, strict: bool, require_coverage: Option, force: bool) { // Fork a child process to isolate exit calls from the check command. 
use std::process::Command; + let start = Instant::now(); let mut cmd = Command::new(std::env::current_exe().expect("Cannot find current executable")); cmd.arg("check"); cmd.arg("--root").arg(root); if strict { cmd.arg("--strict"); } + if force { + cmd.arg("--force"); + } if let Some(cov) = require_coverage { cmd.arg("--require-coverage").arg(cov.to_string()); } match cmd.status() { Ok(status) => { + let elapsed = start.elapsed(); if status.success() { - println!("\n{}", "All checks passed!".green().bold()); + println!( + "\n{} ({}ms)", + "All checks passed!".green().bold(), + elapsed.as_millis() + ); } else { - println!("\n{}", "Some checks failed.".red().bold()); + println!( + "\n{} ({}ms)", + "Some checks failed.".red().bold(), + elapsed.as_millis() + ); } } Err(e) => {