From 5160ed5ffe224f1944e583184409b8fb1c890ddc Mon Sep 17 00:00:00 2001 From: Willie Date: Sun, 29 Mar 2026 22:47:09 +0800 Subject: [PATCH 001/205] feat(resource): add ResourceKind interface, SkillKind/AgentKind implementations, and reserved name validation MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Create internal/resource/ package with: - kind.go: ResourceKind interface (Discover, ResolveName, FlatName, CreateLink, feature gates) + DiscoveredResource struct - skill.go: SkillKind — directory-based discovery via SKILL.md, frontmatter name resolution, nested separator flatten (a/b → a__b) - agent.go: AgentKind — single-file .md discovery with conventional excludes (README/LICENSE/CHANGELOG/SKILL.md), filename-based naming with frontmatter override, directory-strip flatten (dir/f.md → f.md) Add "agents" to reservedSkillNames in validate.SkillName() to prevent skill names conflicting with the agents source directory. 16 unit tests covering discovery, nested paths, name resolution, flat name computation, feature gates, and edge cases. --- internal/resource/agent.go | 118 +++++++++++ internal/resource/kind.go | 59 ++++++ internal/resource/kind_test.go | 320 +++++++++++++++++++++++++++++ internal/resource/skill.go | 100 +++++++++ internal/validate/validate.go | 11 + internal/validate/validate_test.go | 25 +++ 6 files changed, 633 insertions(+) create mode 100644 internal/resource/agent.go create mode 100644 internal/resource/kind.go create mode 100644 internal/resource/kind_test.go create mode 100644 internal/resource/skill.go diff --git a/internal/resource/agent.go b/internal/resource/agent.go new file mode 100644 index 00000000..689ac895 --- /dev/null +++ b/internal/resource/agent.go @@ -0,0 +1,118 @@ +package resource + +import ( + "os" + "path/filepath" + "strings" + + "skillshare/internal/utils" +) + +// AgentKind handles single-file .md agent resources. +type AgentKind struct{} + +var _ ResourceKind = AgentKind{} + +func (AgentKind) Kind() string { return "agent" } + +// Discover scans sourceDir for .md files, excluding conventional files +// (README.md, LICENSE.md, etc.) and hidden files. +func (AgentKind) Discover(sourceDir string) ([]DiscoveredResource, error) { + walkRoot := utils.ResolveSymlink(sourceDir) + + var resources []DiscoveredResource + + err := filepath.Walk(walkRoot, func(path string, info os.FileInfo, err error) error { + if err != nil { + return nil + } + + if info.IsDir() { + if info.Name() == ".git" || utils.IsHidden(info.Name()) && info.Name() != "." { + return filepath.SkipDir + } + return nil + } + + // Only .md files + if !strings.HasSuffix(strings.ToLower(info.Name()), ".md") { + return nil + } + + // Skip conventional excludes + if ConventionalExcludes[info.Name()] { + return nil + } + + // Skip hidden files + if utils.IsHidden(info.Name()) { + return nil + } + + relPath, relErr := filepath.Rel(walkRoot, path) + if relErr != nil { + return nil + } + relPath = strings.ReplaceAll(relPath, "\\", "/") + + name := agentNameFromFile(path, info.Name()) + + isNested := strings.Contains(relPath, "/") + + resources = append(resources, DiscoveredResource{ + Name: name, + Kind: "agent", + RelPath: relPath, + AbsPath: path, + IsNested: isNested, + FlatName: AgentFlatName(relPath), + SourcePath: filepath.Join(sourceDir, relPath), + }) + + return nil + }) + + if err != nil { + return nil, err + } + + return resources, nil +} + +// agentNameFromFile resolves an agent name. Checks frontmatter name field +// first, falls back to filename without .md extension. +func agentNameFromFile(filePath, fileName string) string { + name := utils.ParseFrontmatterField(filePath, "name") + if name != "" { + return name + } + return strings.TrimSuffix(fileName, ".md") +} + +// ResolveName extracts the agent name from an .md file. +// Checks frontmatter name field first, falls back to filename. +func (AgentKind) ResolveName(path string) string { + return agentNameFromFile(path, filepath.Base(path)) +} + +// FlatName strips directory prefixes, keeping only the filename. +// Example: "curriculum/math-tutor.md" → "math-tutor.md" +func (AgentKind) FlatName(relPath string) string { + return AgentFlatName(relPath) +} + +// AgentFlatName is the standalone flat name computation for agents. +// Strips directory prefixes, keeping only the filename. +func AgentFlatName(relPath string) string { + relPath = strings.ReplaceAll(relPath, "\\", "/") + return filepath.Base(relPath) +} + +// CreateLink creates a file symlink from dst pointing to src. +func (AgentKind) CreateLink(src, dst string) error { + return os.Symlink(src, dst) +} + +func (AgentKind) SupportsAudit() bool { return true } +func (AgentKind) SupportsTrack() bool { return true } +func (AgentKind) SupportsCollect() bool { return true } diff --git a/internal/resource/kind.go b/internal/resource/kind.go new file mode 100644 index 00000000..1df12e90 --- /dev/null +++ b/internal/resource/kind.go @@ -0,0 +1,59 @@ +package resource + +// ResourceKind encapsulates per-kind behavior for skills and agents. +// Each kind defines how resources are discovered, named, linked, and validated. +type ResourceKind interface { + // Kind returns the resource kind identifier ("skill" or "agent"). + Kind() string + + // Discover scans sourceDir and returns all resources found. + Discover(sourceDir string) ([]DiscoveredResource, error) + + // ResolveName extracts the canonical name from a resource at the given path. + // For skills: reads SKILL.md frontmatter name field. + // For agents: uses filename, with optional frontmatter name override. + ResolveName(path string) string + + // FlatName computes the flattened name used in target directories. + // For skills: path/to/skill → path__to__skill (nested separator). + // For agents: dir/file.md → file.md (directory prefix stripped). + FlatName(relPath string) string + + // CreateLink creates a symlink from dst pointing to src. + // For skills: directory symlink. For agents: file symlink. + // Both use os.Symlink; the distinction is semantic (unit shape). + CreateLink(src, dst string) error + + // SupportsAudit reports whether this kind supports security audit scanning. + SupportsAudit() bool + + // SupportsTrack reports whether this kind supports tracked repo updates. + SupportsTrack() bool + + // SupportsCollect reports whether this kind supports collecting from targets. + SupportsCollect() bool +} + +// DiscoveredResource represents a resource found during source directory scan. +// Used for both skills and agents. +type DiscoveredResource struct { + Name string // Canonical name (from frontmatter or filename) + Kind string // "skill" or "agent" + RelPath string // Relative path from source root + AbsPath string // Full absolute path + IsNested bool // Whether this resource is inside a subdirectory + FlatName string // Flattened name for target directories + IsInRepo bool // Whether this resource is inside a tracked repo + Disabled bool // Whether this resource is ignored by ignore file + SourcePath string // Full path preserving caller's logical path (may differ from AbsPath if symlinked) +} + +// ConventionalExcludes are filenames excluded from agent discovery. +var ConventionalExcludes = map[string]bool{ + "README.md": true, + "CHANGELOG.md": true, + "LICENSE.md": true, + "HISTORY.md": true, + "SECURITY.md": true, + "SKILL.md": true, +} diff --git a/internal/resource/kind_test.go b/internal/resource/kind_test.go new file mode 100644 index 00000000..a3ceb43e --- /dev/null +++ b/internal/resource/kind_test.go @@ -0,0 +1,320 @@ +package resource + +import ( + "os" + "path/filepath" + "testing" +) + +// --- SkillKind tests --- + +func TestSkillKind_Kind(t *testing.T) { + k := SkillKind{} + if k.Kind() != "skill" { + t.Errorf("SkillKind.Kind() = %q, want %q", k.Kind(), "skill") + } +} + +func TestSkillKind_Discover(t *testing.T) { + dir := t.TempDir() + + // Create two skills + os.MkdirAll(filepath.Join(dir, "my-skill"), 0o755) + os.WriteFile(filepath.Join(dir, "my-skill", "SKILL.md"), []byte("---\nname: my-skill\n---\n# Content"), 0o644) + + os.MkdirAll(filepath.Join(dir, "another"), 0o755) + os.WriteFile(filepath.Join(dir, "another", "SKILL.md"), []byte("---\nname: another\n---\n# Content"), 0o644) + + // Non-skill directory (no SKILL.md) + os.MkdirAll(filepath.Join(dir, "not-a-skill"), 0o755) + os.WriteFile(filepath.Join(dir, "not-a-skill", "README.md"), []byte("# Readme"), 0o644) + + k := SkillKind{} + resources, err := k.Discover(dir) + if err != nil { + t.Fatalf("Discover error: %v", err) + } + + if len(resources) != 2 { + t.Fatalf("expected 2 resources, got %d", len(resources)) + } + + names := map[string]bool{} + for _, r := range resources { + names[r.Name] = true + if r.Kind != "skill" { + t.Errorf("resource %q has Kind=%q, want %q", r.Name, r.Kind, "skill") + } + } + + if !names["my-skill"] { + t.Error("expected to discover 'my-skill'") + } + if !names["another"] { + t.Error("expected to discover 'another'") + } +} + +func TestSkillKind_Discover_Nested(t *testing.T) { + dir := t.TempDir() + + os.MkdirAll(filepath.Join(dir, "_team", "frontend", "ui"), 0o755) + os.WriteFile(filepath.Join(dir, "_team", "frontend", "ui", "SKILL.md"), []byte("---\nname: ui\n---\n"), 0o644) + + k := SkillKind{} + resources, err := k.Discover(dir) + if err != nil { + t.Fatalf("Discover error: %v", err) + } + + if len(resources) != 1 { + t.Fatalf("expected 1 resource, got %d", len(resources)) + } + + r := resources[0] + if r.Name != "ui" { + t.Errorf("Name = %q, want %q", r.Name, "ui") + } + if r.FlatName != "_team__frontend__ui" { + t.Errorf("FlatName = %q, want %q", r.FlatName, "_team__frontend__ui") + } + if !r.IsNested { + t.Error("expected IsNested=true for nested skill") + } + if !r.IsInRepo { + t.Error("expected IsInRepo=true for _-prefixed dir") + } +} + +func TestSkillKind_ResolveName_FromFrontmatter(t *testing.T) { + dir := t.TempDir() + skillDir := filepath.Join(dir, "my-skill") + os.MkdirAll(skillDir, 0o755) + os.WriteFile(filepath.Join(skillDir, "SKILL.md"), []byte("---\nname: custom-name\n---\n"), 0o644) + + k := SkillKind{} + name := k.ResolveName(skillDir) + if name != "custom-name" { + t.Errorf("ResolveName = %q, want %q", name, "custom-name") + } +} + +func TestSkillKind_ResolveName_FallbackToDirName(t *testing.T) { + dir := t.TempDir() + skillDir := filepath.Join(dir, "fallback-skill") + os.MkdirAll(skillDir, 0o755) + os.WriteFile(filepath.Join(skillDir, "SKILL.md"), []byte("---\n---\n"), 0o644) + + k := SkillKind{} + name := k.ResolveName(skillDir) + if name != "fallback-skill" { + t.Errorf("ResolveName = %q, want %q", name, "fallback-skill") + } +} + +func TestSkillKind_FlatName(t *testing.T) { + k := SkillKind{} + + tests := []struct { + relPath string + want string + }{ + {"my-skill", "my-skill"}, + {"_team/frontend/ui", "_team__frontend__ui"}, + } + + for _, tt := range tests { + got := k.FlatName(tt.relPath) + if got != tt.want { + t.Errorf("FlatName(%q) = %q, want %q", tt.relPath, got, tt.want) + } + } +} + +func TestSkillKind_FeatureGates(t *testing.T) { + k := SkillKind{} + if !k.SupportsAudit() { + t.Error("SkillKind should support audit") + } + if !k.SupportsTrack() { + t.Error("SkillKind should support track") + } + if !k.SupportsCollect() { + t.Error("SkillKind should support collect") + } +} + +// --- AgentKind tests --- + +func TestAgentKind_Kind(t *testing.T) { + k := AgentKind{} + if k.Kind() != "agent" { + t.Errorf("AgentKind.Kind() = %q, want %q", k.Kind(), "agent") + } +} + +func TestAgentKind_Discover(t *testing.T) { + dir := t.TempDir() + + // Create agent files + os.WriteFile(filepath.Join(dir, "tutor.md"), []byte("# Tutor agent"), 0o644) + os.WriteFile(filepath.Join(dir, "reviewer.md"), []byte("# Reviewer agent"), 0o644) + + // Conventional excludes should be skipped + os.WriteFile(filepath.Join(dir, "README.md"), []byte("# Readme"), 0o644) + os.WriteFile(filepath.Join(dir, "LICENSE.md"), []byte("# License"), 0o644) + os.WriteFile(filepath.Join(dir, "SKILL.md"), []byte("---\nname: test\n---\n"), 0o644) + + // Non-.md files should be skipped + os.WriteFile(filepath.Join(dir, "config.yaml"), []byte("key: value"), 0o644) + + // Hidden files should be skipped + os.WriteFile(filepath.Join(dir, ".hidden.md"), []byte("# Hidden"), 0o644) + + k := AgentKind{} + resources, err := k.Discover(dir) + if err != nil { + t.Fatalf("Discover error: %v", err) + } + + if len(resources) != 2 { + t.Fatalf("expected 2 resources, got %d: %v", len(resources), resources) + } + + names := map[string]bool{} + for _, r := range resources { + names[r.Name] = true + if r.Kind != "agent" { + t.Errorf("resource %q has Kind=%q, want %q", r.Name, r.Kind, "agent") + } + } + + if !names["tutor"] { + t.Error("expected to discover 'tutor'") + } + if !names["reviewer"] { + t.Error("expected to discover 'reviewer'") + } +} + +func TestAgentKind_Discover_Nested(t *testing.T) { + dir := t.TempDir() + + os.MkdirAll(filepath.Join(dir, "curriculum"), 0o755) + os.WriteFile(filepath.Join(dir, "curriculum", "math-tutor.md"), []byte("# Math tutor"), 0o644) + + k := AgentKind{} + resources, err := k.Discover(dir) + if err != nil { + t.Fatalf("Discover error: %v", err) + } + + if len(resources) != 1 { + t.Fatalf("expected 1 resource, got %d", len(resources)) + } + + r := resources[0] + if r.Name != "math-tutor" { + t.Errorf("Name = %q, want %q", r.Name, "math-tutor") + } + if r.RelPath != "curriculum/math-tutor.md" { + t.Errorf("RelPath = %q, want %q", r.RelPath, "curriculum/math-tutor.md") + } + if r.FlatName != "math-tutor.md" { + t.Errorf("FlatName = %q, want %q", r.FlatName, "math-tutor.md") + } + if !r.IsNested { + t.Error("expected IsNested=true for nested agent") + } +} + +func TestAgentKind_ResolveName_FromFilename(t *testing.T) { + dir := t.TempDir() + agentFile := filepath.Join(dir, "tutor.md") + os.WriteFile(agentFile, []byte("# Tutor agent"), 0o644) + + k := AgentKind{} + name := k.ResolveName(agentFile) + if name != "tutor" { + t.Errorf("ResolveName = %q, want %q", name, "tutor") + } +} + +func TestAgentKind_ResolveName_FromFrontmatter(t *testing.T) { + dir := t.TempDir() + agentFile := filepath.Join(dir, "tutor.md") + os.WriteFile(agentFile, []byte("---\nname: curriculum-tutor\n---\n# Tutor"), 0o644) + + k := AgentKind{} + name := k.ResolveName(agentFile) + if name != "curriculum-tutor" { + t.Errorf("ResolveName = %q, want %q", name, "curriculum-tutor") + } +} + +func TestAgentKind_FlatName(t *testing.T) { + k := AgentKind{} + + tests := []struct { + relPath string + want string + }{ + {"tutor.md", "tutor.md"}, + {"curriculum/math-tutor.md", "math-tutor.md"}, + {"a/b/deep.md", "deep.md"}, + } + + for _, tt := range tests { + got := k.FlatName(tt.relPath) + if got != tt.want { + t.Errorf("FlatName(%q) = %q, want %q", tt.relPath, got, tt.want) + } + } +} + +func TestAgentKind_FeatureGates(t *testing.T) { + k := AgentKind{} + if !k.SupportsAudit() { + t.Error("AgentKind should support audit") + } + if !k.SupportsTrack() { + t.Error("AgentKind should support track") + } + if !k.SupportsCollect() { + t.Error("AgentKind should support collect") + } +} + +func TestAgentKind_Discover_EmptyDir(t *testing.T) { + dir := t.TempDir() + + k := AgentKind{} + resources, err := k.Discover(dir) + if err != nil { + t.Fatalf("Discover error: %v", err) + } + if len(resources) != 0 { + t.Errorf("expected 0 resources, got %d", len(resources)) + } +} + +func TestAgentKind_Discover_SkipsGitDir(t *testing.T) { + dir := t.TempDir() + + os.MkdirAll(filepath.Join(dir, ".git"), 0o755) + os.WriteFile(filepath.Join(dir, ".git", "config.md"), []byte("# git config"), 0o644) + os.WriteFile(filepath.Join(dir, "real-agent.md"), []byte("# Agent"), 0o644) + + k := AgentKind{} + resources, err := k.Discover(dir) + if err != nil { + t.Fatalf("Discover error: %v", err) + } + + if len(resources) != 1 { + t.Fatalf("expected 1 resource, got %d", len(resources)) + } + if resources[0].Name != "real-agent" { + t.Errorf("Name = %q, want %q", resources[0].Name, "real-agent") + } +} diff --git a/internal/resource/skill.go b/internal/resource/skill.go new file mode 100644 index 00000000..a16c85db --- /dev/null +++ b/internal/resource/skill.go @@ -0,0 +1,100 @@ +package resource + +import ( + "os" + "path/filepath" + "strings" + + "skillshare/internal/utils" +) + +// SkillKind handles directory-based skill resources identified by SKILL.md. +type SkillKind struct{} + +var _ ResourceKind = SkillKind{} + +func (SkillKind) Kind() string { return "skill" } + +// Discover scans sourceDir for directories containing SKILL.md. +// This is a simplified discovery for the resource package; the full +// discovery with ignore support, frontmatter parsing, and context +// collection remains in internal/sync/discover_walk.go. +func (SkillKind) Discover(sourceDir string) ([]DiscoveredResource, error) { + walkRoot := utils.ResolveSymlink(sourceDir) + + var resources []DiscoveredResource + + err := filepath.Walk(walkRoot, func(path string, info os.FileInfo, err error) error { + if err != nil { + return nil + } + + if info.IsDir() && info.Name() == ".git" { + return filepath.SkipDir + } + + if !info.IsDir() && info.Name() == "SKILL.md" { + skillDir := filepath.Dir(path) + relPath, relErr := filepath.Rel(walkRoot, skillDir) + if relErr != nil || relPath == "." { + return nil + } + relPath = strings.ReplaceAll(relPath, "\\", "/") + + name := utils.ParseFrontmatterField(filepath.Join(skillDir, "SKILL.md"), "name") + if name == "" { + name = filepath.Base(skillDir) + } + + isInRepo := false + parts := strings.Split(relPath, "/") + if len(parts) > 0 && utils.IsTrackedRepoDir(parts[0]) { + isInRepo = true + } + + resources = append(resources, DiscoveredResource{ + Name: name, + Kind: "skill", + RelPath: relPath, + AbsPath: skillDir, + IsNested: strings.Contains(relPath, "/"), + FlatName: utils.PathToFlatName(relPath), + IsInRepo: isInRepo, + SourcePath: filepath.Join(sourceDir, relPath), + }) + } + + return nil + }) + + if err != nil { + return nil, err + } + + return resources, nil +} + +// ResolveName reads the name field from SKILL.md frontmatter. +// Falls back to directory base name if frontmatter has no name. +func (SkillKind) ResolveName(path string) string { + skillFile := filepath.Join(path, "SKILL.md") + name := utils.ParseFrontmatterField(skillFile, "name") + if name != "" { + return name + } + return filepath.Base(path) +} + +// FlatName converts a relative path to a flat name using __ separator. +func (SkillKind) FlatName(relPath string) string { + return utils.PathToFlatName(relPath) +} + +// CreateLink creates a directory symlink from dst pointing to src. +func (SkillKind) CreateLink(src, dst string) error { + return os.Symlink(src, dst) +} + +func (SkillKind) SupportsAudit() bool { return true } +func (SkillKind) SupportsTrack() bool { return true } +func (SkillKind) SupportsCollect() bool { return true } diff --git a/internal/validate/validate.go b/internal/validate/validate.go index b2e56d3c..273d21f6 100644 --- a/internal/validate/validate.go +++ b/internal/validate/validate.go @@ -54,11 +54,16 @@ func TargetName(name string) error { return nil } +// reservedSkillNames are names that cannot be used as skill names +// because they conflict with resource kind directories or commands. +var reservedSkillNames = []string{"agents"} + // SkillName validates a skill name. // Rules: // - Must start with a letter or number // - Can contain letters, numbers, underscores, and hyphens // - Length 1-64 characters +// - Cannot be a reserved skill name (e.g. "agents") func SkillName(name string) error { if name == "" { return fmt.Errorf("skill name cannot be empty") @@ -72,6 +77,12 @@ func SkillName(name string) error { return fmt.Errorf("skill name must start with a letter or number and contain only letters, numbers, underscores, and hyphens") } + for _, r := range reservedSkillNames { + if strings.EqualFold(name, r) { + return fmt.Errorf("'%s' is a reserved name and cannot be used as a skill name", name) + } + } + return nil } diff --git a/internal/validate/validate_test.go b/internal/validate/validate_test.go index 46a04b37..49c2a728 100644 --- a/internal/validate/validate_test.go +++ b/internal/validate/validate_test.go @@ -44,6 +44,31 @@ func TestTargetName(t *testing.T) { } } +func TestSkillName(t *testing.T) { + tests := []struct { + name string + input string + wantErr bool + }{ + {"valid", "my-skill", false}, + {"valid with number", "skill2", false}, + {"empty", "", true}, + {"too long", strings.Repeat("a", 65), true}, + {"reserved agents", "agents", true}, + {"reserved agents uppercase", "Agents", true}, + {"starts with special", "-skill", true}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := SkillName(tt.input) + if (err != nil) != tt.wantErr { + t.Errorf("SkillName(%q) error = %v, wantErr %v", tt.input, err, tt.wantErr) + } + }) + } +} + func TestPath(t *testing.T) { tests := []struct { name string From c049ccdae0095592300c7a1be2a1d84f0cbe1832 Mon Sep 17 00:00:00 2001 From: Willie Date: Sun, 29 Mar 2026 22:55:20 +0800 Subject: [PATCH 002/205] feat(config): add agents_source field and create agents directory on init Add AgentsSource field to Config struct with EffectiveAgentsSource() method that defaults to ~/.config/skillshare/agents when not set. Global init now creates agents source directory alongside skills source. Project init creates .skillshare/agents/ alongside .skillshare/skills/. projectRuntime gains agentsSourcePath field for project-mode agent ops. Update config.schema.json with agents_source property definition. --- cmd/skillshare/init.go | 6 +++++- cmd/skillshare/init_project.go | 3 +++ cmd/skillshare/project_runtime.go | 22 ++++++++++++---------- internal/config/basedir_test.go | 20 ++++++++++++++++++++ internal/config/config.go | 10 ++++++++++ schemas/config.schema.json | 6 ++++++ 6 files changed, 56 insertions(+), 11 deletions(-) diff --git a/cmd/skillshare/init.go b/cmd/skillshare/init.go index ab68ce30..4066598a 100644 --- a/cmd/skillshare/init.go +++ b/cmd/skillshare/init.go @@ -377,10 +377,14 @@ func performFreshInit(opts *initOptions, home string) error { ui.Warning("Dry run mode - no changes will be made") } - // Create source directory if needed + // Create source directories if needed if err := createSourceDir(sourcePath, opts.dryRun); err != nil { return err } + agentsSourcePath := filepath.Join(filepath.Dir(sourcePath), "agents") + if err := createSourceDir(agentsSourcePath, opts.dryRun); err != nil { + return err + } // Copy skills from selected directory if copyFromPath != "" { diff --git a/cmd/skillshare/init_project.go b/cmd/skillshare/init_project.go index 55c8fa91..01464e7e 100644 --- a/cmd/skillshare/init_project.go +++ b/cmd/skillshare/init_project.go @@ -216,6 +216,9 @@ func performProjectInit(root string, opts projectInitOptions) error { if err := os.MkdirAll(filepath.Join(root, ".skillshare", "skills"), 0755); err != nil { return fmt.Errorf("failed to create .skillshare/skills: %w", err) } + if err := os.MkdirAll(filepath.Join(root, ".skillshare", "agents"), 0755); err != nil { + return fmt.Errorf("failed to create .skillshare/agents: %w", err) + } if err := ensureProjectGitignore(root, opts.configMode == "local"); err != nil { return err diff --git a/cmd/skillshare/project_runtime.go b/cmd/skillshare/project_runtime.go index c00f666e..60940eda 100644 --- a/cmd/skillshare/project_runtime.go +++ b/cmd/skillshare/project_runtime.go @@ -7,11 +7,12 @@ import ( ) type projectRuntime struct { - root string - config *config.ProjectConfig - registry *config.Registry - sourcePath string - targets map[string]config.TargetConfig + root string + config *config.ProjectConfig + registry *config.Registry + sourcePath string + agentsSourcePath string + targets map[string]config.TargetConfig } func loadProjectRuntime(root string) (*projectRuntime, error) { @@ -31,10 +32,11 @@ func loadProjectRuntime(root string) (*projectRuntime, error) { } return &projectRuntime{ - root: root, - config: cfg, - registry: reg, - sourcePath: filepath.Join(root, ".skillshare", "skills"), - targets: targets, + root: root, + config: cfg, + registry: reg, + sourcePath: filepath.Join(root, ".skillshare", "skills"), + agentsSourcePath: filepath.Join(root, ".skillshare", "agents"), + targets: targets, }, nil } diff --git a/internal/config/basedir_test.go b/internal/config/basedir_test.go index 90a6dc03..356530f3 100644 --- a/internal/config/basedir_test.go +++ b/internal/config/basedir_test.go @@ -48,6 +48,26 @@ func TestConfigPath_RespectsXDGConfigHome(t *testing.T) { } } +func TestEffectiveAgentsSource_Default(t *testing.T) { + t.Setenv("XDG_CONFIG_HOME", "") + cfg := &Config{} + + got := cfg.EffectiveAgentsSource() + want := filepath.Join(BaseDir(), "agents") + if got != want { + t.Errorf("EffectiveAgentsSource() = %q, want %q", got, want) + } +} + +func TestEffectiveAgentsSource_Explicit(t *testing.T) { + cfg := &Config{AgentsSource: "/custom/agents"} + + got := cfg.EffectiveAgentsSource() + if got != "/custom/agents" { + t.Errorf("EffectiveAgentsSource() = %q, want %q", got, "/custom/agents") + } +} + func TestConfigPath_SKILLSHARECONFIGTakesPriority(t *testing.T) { t.Setenv("SKILLSHARE_CONFIG", "/override/config.yaml") t.Setenv("XDG_CONFIG_HOME", "/custom/config") diff --git a/internal/config/config.go b/internal/config/config.go index 5664d4dc..21c690e0 100644 --- a/internal/config/config.go +++ b/internal/config/config.go @@ -216,6 +216,7 @@ type ExtraConfig struct { // Config holds the application configuration type Config struct { Source string `yaml:"source"` + AgentsSource string `yaml:"agents_source,omitempty"` ExtrasSource string `yaml:"extras_source,omitempty"` Mode string `yaml:"mode,omitempty"` // default mode: merge TargetNaming string `yaml:"target_naming,omitempty"` @@ -233,6 +234,15 @@ type Config struct { RegistryDir string `yaml:"-"` } +// EffectiveAgentsSource returns the agents source directory. +// Defaults to /agents if not explicitly configured. +func (c *Config) EffectiveAgentsSource() string { + if c.AgentsSource != "" { + return ExpandPath(c.AgentsSource) + } + return filepath.Join(BaseDir(), "agents") +} + // EffectiveGitLabHosts returns GitLabHosts merged with SKILLSHARE_GITLAB_HOSTS env var. // Use this instead of accessing GitLabHosts directly for runtime behavior; // GitLabHosts contains only config-file values and is safe to persist via Save(). diff --git a/schemas/config.schema.json b/schemas/config.schema.json index f3fbb49a..f00364dd 100644 --- a/schemas/config.schema.json +++ b/schemas/config.schema.json @@ -13,6 +13,12 @@ "default": "~/.config/skillshare/skills", "examples": ["~/.config/skillshare/skills", "/home/user/my-skills"] }, + "agents_source": { + "type": "string", + "description": "Path to the agents source directory. Supports ~ for home directory. Defaults to ~/.config/skillshare/agents if not set.", + "default": "~/.config/skillshare/agents", + "examples": ["~/.config/skillshare/agents", "/home/user/my-agents"] + }, "mode": { "type": "string", "description": "Default sync mode for all targets.", From ea055de8e5eb8046541eae03aab816f39e191bfb Mon Sep 17 00:00:00 2001 From: Willie Date: Sun, 29 Mar 2026 22:58:13 +0800 Subject: [PATCH 003/205] refactor(registry): rename SkillEntry to ResourceEntry and add unified registry load/save Rename SkillEntry to ResourceEntry with type alias for backward compat. All 56 call sites continue to compile without changes. Add LoadUnifiedRegistry() to merge separate skills and agents registry files for global mode (skills/registry.yaml + agents/registry.yaml). Agent entries automatically get Kind="agent" during merge. Add SaveSplitByKind() to split a unified registry back by kind for persisting to the correct source directory. Skips writing agents registry file when no agent entries exist. --- internal/config/project.go | 10 ++- internal/config/registry.go | 59 +++++++++++++++ internal/config/registry_test.go | 121 +++++++++++++++++++++++++++++++ 3 files changed, 187 insertions(+), 3 deletions(-) diff --git a/internal/config/project.go b/internal/config/project.go index f8404323..715c34c3 100644 --- a/internal/config/project.go +++ b/internal/config/project.go @@ -182,8 +182,9 @@ func (t *ProjectTargetEntry) EnsureSkills() *ResourceTargetConfig { return t.Skills } -// SkillEntry represents a remote skill entry in config (shared by global and project). -type SkillEntry struct { +// ResourceEntry represents a remote resource entry in config (shared by global and project). +// Used for both skills and agents. +type ResourceEntry struct { Name string `yaml:"name"` Kind string `yaml:"kind,omitempty"` Source string `yaml:"source"` @@ -192,9 +193,12 @@ type SkillEntry struct { Branch string `yaml:"branch,omitempty"` } +// SkillEntry is an alias for backward compatibility. +type SkillEntry = ResourceEntry + // EffectiveKind returns the resource kind for this entry. // Returns "skill" if Kind is empty (backward compatibility). -func (s SkillEntry) EffectiveKind() string { +func (s ResourceEntry) EffectiveKind() string { if s.Kind == "" { return "skill" } diff --git a/internal/config/registry.go b/internal/config/registry.go index 256ec7d5..a186ed93 100644 --- a/internal/config/registry.go +++ b/internal/config/registry.go @@ -127,3 +127,62 @@ func (r *Registry) Save(dir string) error { return nil } + +// LoadUnifiedRegistry merges registries from both skills and agents source +// directories into a single Registry. Used in global mode where skills and +// agents have separate registry files. +// Skills entries get Kind="" (backward compat), agent entries get Kind="agent". +func LoadUnifiedRegistry(skillsDir, agentsDir string) (*Registry, error) { + skillsReg, err := LoadRegistry(skillsDir) + if err != nil { + return nil, fmt.Errorf("failed to load skills registry: %w", err) + } + + agentsReg, err := LoadRegistry(agentsDir) + if err != nil { + return nil, fmt.Errorf("failed to load agents registry: %w", err) + } + + // Ensure agent entries have Kind set + for i := range agentsReg.Skills { + if agentsReg.Skills[i].Kind == "" { + agentsReg.Skills[i].Kind = "agent" + } + } + + unified := &Registry{ + Skills: make([]SkillEntry, 0, len(skillsReg.Skills)+len(agentsReg.Skills)), + } + unified.Skills = append(unified.Skills, skillsReg.Skills...) + unified.Skills = append(unified.Skills, agentsReg.Skills...) + + return unified, nil +} + +// SaveSplitByKind splits the unified registry by kind and saves each to +// the appropriate directory. Skills (Kind="" or "skill") go to skillsDir, +// agents (Kind="agent") go to agentsDir. +func (r *Registry) SaveSplitByKind(skillsDir, agentsDir string) error { + skillsReg := &Registry{} + agentsReg := &Registry{} + + for _, entry := range r.Skills { + if entry.EffectiveKind() == "agent" { + agentsReg.Skills = append(agentsReg.Skills, entry) + } else { + skillsReg.Skills = append(skillsReg.Skills, entry) + } + } + + if err := skillsReg.Save(skillsDir); err != nil { + return fmt.Errorf("failed to save skills registry: %w", err) + } + + if len(agentsReg.Skills) > 0 { + if err := agentsReg.Save(agentsDir); err != nil { + return fmt.Errorf("failed to save agents registry: %w", err) + } + } + + return nil +} diff --git a/internal/config/registry_test.go b/internal/config/registry_test.go index 08a089cd..ca8cbef6 100644 --- a/internal/config/registry_test.go +++ b/internal/config/registry_test.go @@ -130,6 +130,127 @@ func TestMigrateGlobalSkills_NoMigrationWhenRegistryExists(t *testing.T) { } } +func TestLoadUnifiedRegistry_MergesBoth(t *testing.T) { + skillsDir := t.TempDir() + agentsDir := t.TempDir() + + // Write skills registry + skillsReg := &Registry{ + Skills: []SkillEntry{ + {Name: "my-skill", Source: "github.com/user/skills-repo"}, + }, + } + if err := skillsReg.Save(skillsDir); err != nil { + t.Fatalf("Save skills: %v", err) + } + + // Write agents registry + agentsReg := &Registry{ + Skills: []SkillEntry{ + {Name: "my-agent", Source: "github.com/user/agents-repo"}, + }, + } + if err := agentsReg.Save(agentsDir); err != nil { + t.Fatalf("Save agents: %v", err) + } + + unified, err := LoadUnifiedRegistry(skillsDir, agentsDir) + if err != nil { + t.Fatalf("LoadUnifiedRegistry: %v", err) + } + + if len(unified.Skills) != 2 { + t.Fatalf("expected 2 entries, got %d", len(unified.Skills)) + } + + // Agent entry should have Kind="agent" + for _, e := range unified.Skills { + if e.Name == "my-agent" && e.EffectiveKind() != "agent" { + t.Errorf("agent entry should have kind=agent, got %q", e.Kind) + } + if e.Name == "my-skill" && e.EffectiveKind() != "skill" { + t.Errorf("skill entry should have kind=skill, got %q", e.Kind) + } + } +} + +func TestLoadUnifiedRegistry_EmptyAgents(t *testing.T) { + skillsDir := t.TempDir() + agentsDir := t.TempDir() // no registry file + + skillsReg := &Registry{ + Skills: []SkillEntry{ + {Name: "s1", Source: "test"}, + }, + } + skillsReg.Save(skillsDir) + + unified, err := LoadUnifiedRegistry(skillsDir, agentsDir) + if err != nil { + t.Fatalf("LoadUnifiedRegistry: %v", err) + } + if len(unified.Skills) != 1 { + t.Fatalf("expected 1 entry, got %d", len(unified.Skills)) + } +} + +func TestSaveSplitByKind_RoundTrip(t *testing.T) { + skillsDir := t.TempDir() + agentsDir := t.TempDir() + + unified := &Registry{ + Skills: []SkillEntry{ + {Name: "skill-a", Source: "s1"}, + {Name: "agent-b", Source: "s2", Kind: "agent"}, + {Name: "skill-c", Source: "s3", Kind: "skill"}, + }, + } + + if err := unified.SaveSplitByKind(skillsDir, agentsDir); err != nil { + t.Fatalf("SaveSplitByKind: %v", err) + } + + // Load back separately + skillsReg, err := LoadRegistry(skillsDir) + if err != nil { + t.Fatalf("LoadRegistry skills: %v", err) + } + if len(skillsReg.Skills) != 2 { + t.Fatalf("expected 2 skill entries, got %d", len(skillsReg.Skills)) + } + + agentsReg, err := LoadRegistry(agentsDir) + if err != nil { + t.Fatalf("LoadRegistry agents: %v", err) + } + if len(agentsReg.Skills) != 1 { + t.Fatalf("expected 1 agent entry, got %d", len(agentsReg.Skills)) + } + if agentsReg.Skills[0].Name != "agent-b" { + t.Errorf("agent name = %q, want %q", agentsReg.Skills[0].Name, "agent-b") + } +} + +func TestSaveSplitByKind_NoAgents_SkipsAgentFile(t *testing.T) { + skillsDir := t.TempDir() + agentsDir := t.TempDir() + + unified := &Registry{ + Skills: []SkillEntry{ + {Name: "only-skill", Source: "s1"}, + }, + } + + if err := unified.SaveSplitByKind(skillsDir, agentsDir); err != nil { + t.Fatalf("SaveSplitByKind: %v", err) + } + + // Agents dir should not have registry file + if _, err := os.Stat(filepath.Join(agentsDir, "registry.yaml")); err == nil { + t.Error("expected no agents registry.yaml when no agent entries") + } +} + func TestSourceRoot_NoGit(t *testing.T) { dir := t.TempDir() got := SourceRoot(dir) From 9a4fbdc18fecff34d978b3914130d929886be655 Mon Sep 17 00:00:00 2001 From: Willie Date: Sun, 29 Mar 2026 23:11:13 +0800 Subject: [PATCH 004/205] feat(sync): add agent sync engine with merge, symlink, and copy modes Add SyncAgents() dispatcher supporting all three sync modes: - merge: per-file symlinks with local file preservation - symlink: whole directory symlink to agent source - copy: file copy with content-diff detection Add CheckAgentCollisions() to detect agents that flatten to the same filename from different subdirectories. Add PruneOrphanAgentLinks() for merge mode and PruneOrphanAgentCopies() for copy mode to clean up stale agent files in targets. Add CollectAgents() to copy non-symlinked .md files from target back to agent source directory. 18 tests covering all modes, collision detection, pruning, and collect. --- internal/sync/agent_sync.go | 380 +++++++++++++++++++++++++ internal/sync/agent_sync_test.go | 458 +++++++++++++++++++++++++++++++ 2 files changed, 838 insertions(+) create mode 100644 internal/sync/agent_sync.go create mode 100644 internal/sync/agent_sync_test.go diff --git a/internal/sync/agent_sync.go b/internal/sync/agent_sync.go new file mode 100644 index 00000000..41108472 --- /dev/null +++ b/internal/sync/agent_sync.go @@ -0,0 +1,380 @@ +package sync + +import ( + "fmt" + "io" + "os" + "path/filepath" + "strings" + + "skillshare/internal/resource" + "skillshare/internal/utils" +) + +// AgentSyncResult holds the result of syncing agents to a target. +type AgentSyncResult struct { + Linked []string // Agents that were symlinked (merge) or copied (copy) + Skipped []string // Agents that already exist in target (kept local) + Updated []string // Agents that had broken symlinks fixed or content updated +} + +// AgentCollision represents two agents that flatten to the same filename. +type AgentCollision struct { + FlatName string // The colliding flat name (e.g. "helper.md") + PathA string // First agent relative path + PathB string // Second agent relative path +} + +// CheckAgentCollisions detects agents that flatten to the same filename. +func CheckAgentCollisions(agents []resource.DiscoveredResource) []AgentCollision { + seen := make(map[string]string) // flatName → first relPath + var collisions []AgentCollision + + for _, a := range agents { + if prev, ok := seen[a.FlatName]; ok { + collisions = append(collisions, AgentCollision{ + FlatName: a.FlatName, + PathA: prev, + PathB: a.RelPath, + }) + } else { + seen[a.FlatName] = a.RelPath + } + } + + return collisions +} + +// SyncAgents dispatches to the appropriate sync mode for agents. +// mode: "merge" (per-file symlinks), "symlink" (whole dir), "copy" (file copy). +func SyncAgents(agents []resource.DiscoveredResource, sourceDir, targetDir, mode string, dryRun, force bool) (*AgentSyncResult, error) { + switch mode { + case "symlink": + return syncAgentsSymlink(sourceDir, targetDir, dryRun, force) + case "copy": + return syncAgentsCopy(agents, targetDir, dryRun, force) + default: // "merge" or "" + return syncAgentsMerge(agents, targetDir, dryRun, force) + } +} + +// syncAgentsMerge creates per-file symlinks in targetDir for each discovered agent. +// Existing non-symlink files are preserved (skipped) unless force is true. +func syncAgentsMerge(agents []resource.DiscoveredResource, targetDir string, dryRun, force bool) (*AgentSyncResult, error) { + result := &AgentSyncResult{} + + if !dryRun { + if err := os.MkdirAll(targetDir, 0755); err != nil { + return nil, fmt.Errorf("failed to create agent target directory: %w", err) + } + } + + for _, agent := range agents { + targetPath := filepath.Join(targetDir, agent.FlatName) + + info, err := os.Lstat(targetPath) + if err == nil { + if info.Mode()&os.ModeSymlink != 0 { + absLink, linkErr := utils.ResolveLinkTarget(targetPath) + if linkErr != nil { + return nil, fmt.Errorf("failed to resolve link for %s: %w", agent.FlatName, linkErr) + } + absSource, _ := filepath.Abs(agent.AbsPath) + + if utils.PathsEqual(absLink, absSource) { + result.Linked = append(result.Linked, agent.FlatName) + continue + } + + if !dryRun { + os.Remove(targetPath) + if err := os.Symlink(agent.AbsPath, targetPath); err != nil { + return nil, fmt.Errorf("failed to create symlink for %s: %w", agent.FlatName, err) + } + } + result.Updated = append(result.Updated, agent.FlatName) + } else { + if force { + if !dryRun { + os.Remove(targetPath) + if err := os.Symlink(agent.AbsPath, targetPath); err != nil { + return nil, fmt.Errorf("failed to create symlink for %s: %w", agent.FlatName, err) + } + } + result.Updated = append(result.Updated, agent.FlatName) + } else { + result.Skipped = append(result.Skipped, agent.FlatName) + } + } + } else if os.IsNotExist(err) { + if !dryRun { + if err := os.Symlink(agent.AbsPath, targetPath); err != nil { + return nil, fmt.Errorf("failed to create symlink for %s: %w", agent.FlatName, err) + } + } + result.Linked = append(result.Linked, agent.FlatName) + } else { + return nil, fmt.Errorf("failed to check target path for %s: %w", agent.FlatName, err) + } + } + + return result, nil +} + +// syncAgentsSymlink creates a single directory symlink from targetDir to sourceDir. +// If targetDir already exists as a real directory, it's replaced only with force. +func syncAgentsSymlink(sourceDir, targetDir string, dryRun, force bool) (*AgentSyncResult, error) { + result := &AgentSyncResult{} + + if err := os.MkdirAll(filepath.Dir(targetDir), 0755); err != nil { + return nil, fmt.Errorf("failed to create target parent: %w", err) + } + + info, err := os.Lstat(targetDir) + if err == nil { + if info.Mode()&os.ModeSymlink != 0 { + // Already a symlink — check if correct + absLink, linkErr := utils.ResolveLinkTarget(targetDir) + if linkErr != nil { + return nil, fmt.Errorf("failed to resolve link: %w", linkErr) + } + absSource, _ := filepath.Abs(sourceDir) + + if utils.PathsEqual(absLink, absSource) { + result.Linked = append(result.Linked, "(directory)") + return result, nil + } + + // Wrong target + if !dryRun { + os.Remove(targetDir) + if err := os.Symlink(sourceDir, targetDir); err != nil { + return nil, fmt.Errorf("failed to create directory symlink: %w", err) + } + } + result.Updated = append(result.Updated, "(directory)") + } else { + // Real directory + if force { + if !dryRun { + os.RemoveAll(targetDir) + if err := os.Symlink(sourceDir, targetDir); err != nil { + return nil, fmt.Errorf("failed to create directory symlink: %w", err) + } + } + result.Updated = append(result.Updated, "(directory)") + } else { + result.Skipped = append(result.Skipped, "(directory)") + } + } + } else if os.IsNotExist(err) { + if !dryRun { + if err := os.Symlink(sourceDir, targetDir); err != nil { + return nil, fmt.Errorf("failed to create directory symlink: %w", err) + } + } + result.Linked = append(result.Linked, "(directory)") + } else { + return nil, fmt.Errorf("failed to check target path: %w", err) + } + + return result, nil +} + +// syncAgentsCopy copies agent .md files to targetDir. +// Existing files are overwritten if content differs; force replaces all. +func syncAgentsCopy(agents []resource.DiscoveredResource, targetDir string, dryRun, force bool) (*AgentSyncResult, error) { + result := &AgentSyncResult{} + + if !dryRun { + if err := os.MkdirAll(targetDir, 0755); err != nil { + return nil, fmt.Errorf("failed to create agent target directory: %w", err) + } + } + + for _, agent := range agents { + targetPath := filepath.Join(targetDir, agent.FlatName) + + srcData, err := os.ReadFile(agent.AbsPath) + if err != nil { + return nil, fmt.Errorf("failed to read source %s: %w", agent.FlatName, err) + } + + if _, statErr := os.Stat(targetPath); statErr == nil { + // File exists — check if content matches + tgtData, readErr := os.ReadFile(targetPath) + if readErr == nil && string(tgtData) == string(srcData) && !force { + result.Linked = append(result.Linked, agent.FlatName) + continue + } + // Content differs or force — overwrite + if !dryRun { + if err := os.WriteFile(targetPath, srcData, 0644); err != nil { + return nil, fmt.Errorf("failed to write %s: %w", agent.FlatName, err) + } + } + result.Updated = append(result.Updated, agent.FlatName) + } else { + // New file + if !dryRun { + if err := os.WriteFile(targetPath, srcData, 0644); err != nil { + return nil, fmt.Errorf("failed to write %s: %w", agent.FlatName, err) + } + } + result.Linked = append(result.Linked, agent.FlatName) + } + } + + return result, nil +} + +// SyncAgentsToTarget creates file symlinks in targetDir for each discovered agent. +// Uses merge semantics. Kept for backward compatibility; prefer SyncAgents(). +func SyncAgentsToTarget(agents []resource.DiscoveredResource, targetDir string, dryRun, force bool) (*AgentSyncResult, error) { + return syncAgentsMerge(agents, targetDir, dryRun, force) +} + +// PruneOrphanAgentLinks removes file symlinks in targetDir that don't +// correspond to any discovered agent. For merge mode only. +func PruneOrphanAgentLinks(targetDir string, agents []resource.DiscoveredResource, dryRun bool) (removed []string, _ error) { + entries, err := os.ReadDir(targetDir) + if err != nil { + if os.IsNotExist(err) { + return nil, nil + } + return nil, fmt.Errorf("failed to read agent target directory: %w", err) + } + + expected := make(map[string]bool, len(agents)) + for _, a := range agents { + expected[a.FlatName] = true + } + + for _, entry := range entries { + name := entry.Name() + + if !strings.HasSuffix(strings.ToLower(name), ".md") { + continue + } + + info, err := entry.Info() + if err != nil { + continue + } + + if info.Mode()&os.ModeSymlink == 0 { + continue + } + + if expected[name] { + continue + } + + if !dryRun { + os.Remove(filepath.Join(targetDir, name)) + } + removed = append(removed, name) + } + + return removed, nil +} + +// PruneOrphanAgentCopies removes copied .md files in targetDir that don't +// correspond to any discovered agent. For copy mode only. +func PruneOrphanAgentCopies(targetDir string, agents []resource.DiscoveredResource, dryRun bool) (removed []string, _ error) { + entries, err := os.ReadDir(targetDir) + if err != nil { + if os.IsNotExist(err) { + return nil, nil + } + return nil, fmt.Errorf("failed to read agent target directory: %w", err) + } + + expected := make(map[string]bool, len(agents)) + for _, a := range agents { + expected[a.FlatName] = true + } + + for _, entry := range entries { + name := entry.Name() + + if !strings.HasSuffix(strings.ToLower(name), ".md") { + continue + } + + // Skip conventional excludes (user might have README.md etc.) + if resource.ConventionalExcludes[name] { + continue + } + + if expected[name] { + continue + } + + if !dryRun { + os.Remove(filepath.Join(targetDir, name)) + } + removed = append(removed, name) + } + + return removed, nil +} + +// CollectAgents copies non-symlinked .md files from targetDir back to agentSourceDir. +// Returns the list of collected filenames. +func CollectAgents(targetDir, agentSourceDir string, dryRun bool, out io.Writer) ([]string, error) { + entries, err := os.ReadDir(targetDir) + if err != nil { + if os.IsNotExist(err) { + return nil, nil + } + return nil, fmt.Errorf("failed to read agent target directory: %w", err) + } + + if out == nil { + out = DiagOutput + } + + var collected []string + for _, entry := range entries { + name := entry.Name() + + if !strings.HasSuffix(strings.ToLower(name), ".md") { + continue + } + + info, err := entry.Info() + if err != nil { + continue + } + + if info.Mode()&os.ModeSymlink != 0 { + continue + } + + if resource.ConventionalExcludes[name] { + continue + } + + srcPath := filepath.Join(targetDir, name) + dstPath := filepath.Join(agentSourceDir, name) + + if dryRun { + fmt.Fprintf(out, "[dry-run] Would collect agent: %s\n", name) + } else { + if err := os.MkdirAll(agentSourceDir, 0755); err != nil { + return nil, fmt.Errorf("failed to create agent source dir: %w", err) + } + data, err := os.ReadFile(srcPath) + if err != nil { + return nil, fmt.Errorf("failed to read %s: %w", name, err) + } + if err := os.WriteFile(dstPath, data, 0644); err != nil { + return nil, fmt.Errorf("failed to write %s: %w", name, err) + } + } + collected = append(collected, name) + } + + return collected, nil +} diff --git a/internal/sync/agent_sync_test.go b/internal/sync/agent_sync_test.go new file mode 100644 index 00000000..ecc87583 --- /dev/null +++ b/internal/sync/agent_sync_test.go @@ -0,0 +1,458 @@ +package sync + +import ( + "os" + "path/filepath" + "testing" + + "skillshare/internal/resource" +) + +func TestCheckAgentCollisions_NoCollision(t *testing.T) { + agents := []resource.DiscoveredResource{ + {FlatName: "tutor.md", RelPath: "tutor.md"}, + {FlatName: "reviewer.md", RelPath: "reviewer.md"}, + } + collisions := CheckAgentCollisions(agents) + if len(collisions) != 0 { + t.Errorf("expected 0 collisions, got %d", len(collisions)) + } +} + +func TestCheckAgentCollisions_HasCollision(t *testing.T) { + agents := []resource.DiscoveredResource{ + {FlatName: "helper.md", RelPath: "a/helper.md"}, + {FlatName: "helper.md", RelPath: "b/helper.md"}, + } + collisions := CheckAgentCollisions(agents) + if len(collisions) != 1 { + t.Fatalf("expected 1 collision, got %d", len(collisions)) + } + if collisions[0].FlatName != "helper.md" { + t.Errorf("collision FlatName = %q", collisions[0].FlatName) + } +} + +func TestSyncAgentsToTarget_NewLinks(t *testing.T) { + sourceDir := t.TempDir() + targetDir := t.TempDir() + + // Create agent source files + os.WriteFile(filepath.Join(sourceDir, "tutor.md"), []byte("# Tutor"), 0644) + os.WriteFile(filepath.Join(sourceDir, "reviewer.md"), []byte("# Reviewer"), 0644) + + agents := []resource.DiscoveredResource{ + {FlatName: "tutor.md", AbsPath: filepath.Join(sourceDir, "tutor.md")}, + {FlatName: "reviewer.md", AbsPath: filepath.Join(sourceDir, "reviewer.md")}, + } + + result, err := SyncAgentsToTarget(agents, targetDir, false, false) + if err != nil { + t.Fatalf("SyncAgentsToTarget: %v", err) + } + + if len(result.Linked) != 2 { + t.Errorf("expected 2 linked, got %d", len(result.Linked)) + } + + // Verify symlinks exist + for _, name := range []string{"tutor.md", "reviewer.md"} { + linkPath := filepath.Join(targetDir, name) + info, err := os.Lstat(linkPath) + if err != nil { + t.Errorf("expected symlink %s to exist", name) + continue + } + if info.Mode()&os.ModeSymlink == 0 { + t.Errorf("expected %s to be a symlink", name) + } + } +} + +func TestSyncAgentsToTarget_ExistingSymlinkCorrect(t *testing.T) { + sourceDir := t.TempDir() + targetDir := t.TempDir() + + srcFile := filepath.Join(sourceDir, "tutor.md") + os.WriteFile(srcFile, []byte("# Tutor"), 0644) + + // Pre-create correct symlink + os.Symlink(srcFile, filepath.Join(targetDir, "tutor.md")) + + agents := []resource.DiscoveredResource{ + {FlatName: "tutor.md", AbsPath: srcFile}, + } + + result, err := SyncAgentsToTarget(agents, targetDir, false, false) + if err != nil { + t.Fatalf("SyncAgentsToTarget: %v", err) + } + + if len(result.Linked) != 1 { + t.Errorf("expected 1 linked (existing correct), got %d", len(result.Linked)) + } + if len(result.Updated) != 0 { + t.Errorf("expected 0 updated, got %d", len(result.Updated)) + } +} + +func TestSyncAgentsToTarget_LocalFileSkipped(t *testing.T) { + sourceDir := t.TempDir() + targetDir := t.TempDir() + + srcFile := filepath.Join(sourceDir, "tutor.md") + os.WriteFile(srcFile, []byte("# Tutor source"), 0644) + + // Pre-create local file (not a symlink) + os.WriteFile(filepath.Join(targetDir, "tutor.md"), []byte("# Local tutor"), 0644) + + agents := []resource.DiscoveredResource{ + {FlatName: "tutor.md", AbsPath: srcFile}, + } + + result, err := SyncAgentsToTarget(agents, targetDir, false, false) + if err != nil { + t.Fatalf("SyncAgentsToTarget: %v", err) + } + + if len(result.Skipped) != 1 { + t.Errorf("expected 1 skipped, got %d", len(result.Skipped)) + } +} + +func TestSyncAgentsToTarget_ForceReplacesLocal(t *testing.T) { + sourceDir := t.TempDir() + targetDir := t.TempDir() + + srcFile := filepath.Join(sourceDir, "tutor.md") + os.WriteFile(srcFile, []byte("# Tutor source"), 0644) + + os.WriteFile(filepath.Join(targetDir, "tutor.md"), []byte("# Local"), 0644) + + agents := []resource.DiscoveredResource{ + {FlatName: "tutor.md", AbsPath: srcFile}, + } + + result, err := SyncAgentsToTarget(agents, targetDir, false, true) + if err != nil { + t.Fatalf("SyncAgentsToTarget: %v", err) + } + + if len(result.Updated) != 1 { + t.Errorf("expected 1 updated, got %d", len(result.Updated)) + } + + // Should now be a symlink + info, _ := os.Lstat(filepath.Join(targetDir, "tutor.md")) + if info.Mode()&os.ModeSymlink == 0 { + t.Error("expected symlink after force") + } +} + +func TestPruneOrphanAgentLinks(t *testing.T) { + sourceDir := t.TempDir() + targetDir := t.TempDir() + + // Create source file and active symlink + srcFile := filepath.Join(sourceDir, "active.md") + os.WriteFile(srcFile, []byte("# Active"), 0644) + os.Symlink(srcFile, filepath.Join(targetDir, "active.md")) + + // Create orphan symlink + orphanSrc := filepath.Join(sourceDir, "orphan.md") + os.WriteFile(orphanSrc, []byte("# Orphan"), 0644) + os.Symlink(orphanSrc, filepath.Join(targetDir, "orphan.md")) + + // Create non-symlink file (should not be removed) + os.WriteFile(filepath.Join(targetDir, "local.md"), []byte("# Local"), 0644) + + agents := []resource.DiscoveredResource{ + {FlatName: "active.md"}, + } + + removed, err := PruneOrphanAgentLinks(targetDir, agents, false) + if err != nil { + t.Fatalf("PruneOrphanAgentLinks: %v", err) + } + + if len(removed) != 1 { + t.Fatalf("expected 1 removed, got %d: %v", len(removed), removed) + } + if removed[0] != "orphan.md" { + t.Errorf("expected orphan.md removed, got %q", removed[0]) + } + + // local.md should still exist + if _, err := os.Stat(filepath.Join(targetDir, "local.md")); err != nil { + t.Error("local.md should not be removed") + } +} + +func TestPruneOrphanAgentLinks_NonExistentDir(t *testing.T) { + removed, err := PruneOrphanAgentLinks("/nonexistent/path", nil, false) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if len(removed) != 0 { + t.Errorf("expected 0 removed, got %d", len(removed)) + } +} + +func TestCollectAgents(t *testing.T) { + targetDir := t.TempDir() + agentSourceDir := t.TempDir() + + // Create a local (non-symlink) agent file in target + os.WriteFile(filepath.Join(targetDir, "new-agent.md"), []byte("# New agent"), 0644) + + // Create a symlink (should be skipped) + srcFile := filepath.Join(agentSourceDir, "existing.md") + os.WriteFile(srcFile, []byte("# Existing"), 0644) + os.Symlink(srcFile, filepath.Join(targetDir, "existing.md")) + + // Create README (should be skipped) + os.WriteFile(filepath.Join(targetDir, "README.md"), []byte("# Readme"), 0644) + + // Create non-md file (should be skipped) + os.WriteFile(filepath.Join(targetDir, "config.yaml"), []byte("key: val"), 0644) + + collectDir := t.TempDir() + collected, err := CollectAgents(targetDir, collectDir, false, nil) + if err != nil { + t.Fatalf("CollectAgents: %v", err) + } + + if len(collected) != 1 { + t.Fatalf("expected 1 collected, got %d: %v", len(collected), collected) + } + if collected[0] != "new-agent.md" { + t.Errorf("collected = %q, want %q", collected[0], "new-agent.md") + } + + // Verify file was copied + data, err := os.ReadFile(filepath.Join(collectDir, "new-agent.md")) + if err != nil { + t.Fatalf("collected file not found: %v", err) + } + if string(data) != "# New agent" { + t.Errorf("collected content = %q", string(data)) + } +} + +// --- Symlink mode tests --- + +func TestSyncAgents_SymlinkMode_NewDir(t *testing.T) { + sourceDir := t.TempDir() + os.WriteFile(filepath.Join(sourceDir, "tutor.md"), []byte("# Tutor"), 0644) + + targetDir := filepath.Join(t.TempDir(), "agents") + + result, err := SyncAgents(nil, sourceDir, targetDir, "symlink", false, false) + if err != nil { + t.Fatalf("SyncAgents symlink: %v", err) + } + + if len(result.Linked) != 1 { + t.Errorf("expected 1 linked, got %d", len(result.Linked)) + } + + // targetDir should be a symlink to sourceDir + info, err := os.Lstat(targetDir) + if err != nil { + t.Fatalf("Lstat: %v", err) + } + if info.Mode()&os.ModeSymlink == 0 { + t.Error("expected targetDir to be a symlink") + } +} + +func TestSyncAgents_SymlinkMode_AlreadyCorrect(t *testing.T) { + sourceDir := t.TempDir() + parentDir := t.TempDir() + targetDir := filepath.Join(parentDir, "agents") + + os.Symlink(sourceDir, targetDir) + + result, err := SyncAgents(nil, sourceDir, targetDir, "symlink", false, false) + if err != nil { + t.Fatalf("SyncAgents symlink: %v", err) + } + + if len(result.Linked) != 1 { + t.Errorf("expected 1 linked (already correct), got %d", len(result.Linked)) + } + if len(result.Updated) != 0 { + t.Errorf("expected 0 updated, got %d", len(result.Updated)) + } +} + +func TestSyncAgents_SymlinkMode_RealDirSkipped(t *testing.T) { + sourceDir := t.TempDir() + targetDir := t.TempDir() // real directory + + result, err := SyncAgents(nil, sourceDir, targetDir, "symlink", false, false) + if err != nil { + t.Fatalf("SyncAgents symlink: %v", err) + } + + if len(result.Skipped) != 1 { + t.Errorf("expected 1 skipped, got %d", len(result.Skipped)) + } +} + +// --- Copy mode tests --- + +func TestSyncAgents_CopyMode_NewFiles(t *testing.T) { + sourceDir := t.TempDir() + targetDir := t.TempDir() + + os.WriteFile(filepath.Join(sourceDir, "tutor.md"), []byte("# Tutor"), 0644) + + agents := []resource.DiscoveredResource{ + {FlatName: "tutor.md", AbsPath: filepath.Join(sourceDir, "tutor.md")}, + } + + result, err := SyncAgents(agents, sourceDir, targetDir, "copy", false, false) + if err != nil { + t.Fatalf("SyncAgents copy: %v", err) + } + + if len(result.Linked) != 1 { + t.Errorf("expected 1 linked (new copy), got %d", len(result.Linked)) + } + + // Verify it's a real file, not a symlink + info, _ := os.Lstat(filepath.Join(targetDir, "tutor.md")) + if info.Mode()&os.ModeSymlink != 0 { + t.Error("copy mode should create real files, not symlinks") + } + + data, _ := os.ReadFile(filepath.Join(targetDir, "tutor.md")) + if string(data) != "# Tutor" { + t.Errorf("content = %q", string(data)) + } +} + +func TestSyncAgents_CopyMode_SameContent(t *testing.T) { + sourceDir := t.TempDir() + targetDir := t.TempDir() + + os.WriteFile(filepath.Join(sourceDir, "tutor.md"), []byte("# Same"), 0644) + os.WriteFile(filepath.Join(targetDir, "tutor.md"), []byte("# Same"), 0644) + + agents := []resource.DiscoveredResource{ + {FlatName: "tutor.md", AbsPath: filepath.Join(sourceDir, "tutor.md")}, + } + + result, err := SyncAgents(agents, sourceDir, targetDir, "copy", false, false) + if err != nil { + t.Fatalf("SyncAgents copy: %v", err) + } + + if len(result.Linked) != 1 { + t.Errorf("expected 1 linked (same content), got %d", len(result.Linked)) + } + if len(result.Updated) != 0 { + t.Errorf("expected 0 updated, got %d", len(result.Updated)) + } +} + +func TestSyncAgents_CopyMode_DifferentContent(t *testing.T) { + sourceDir := t.TempDir() + targetDir := t.TempDir() + + os.WriteFile(filepath.Join(sourceDir, "tutor.md"), []byte("# New"), 0644) + os.WriteFile(filepath.Join(targetDir, "tutor.md"), []byte("# Old"), 0644) + + agents := []resource.DiscoveredResource{ + {FlatName: "tutor.md", AbsPath: filepath.Join(sourceDir, "tutor.md")}, + } + + result, err := SyncAgents(agents, sourceDir, targetDir, "copy", false, false) + if err != nil { + t.Fatalf("SyncAgents copy: %v", err) + } + + if len(result.Updated) != 1 { + t.Errorf("expected 1 updated, got %d", len(result.Updated)) + } + + data, _ := os.ReadFile(filepath.Join(targetDir, "tutor.md")) + if string(data) != "# New" { + t.Errorf("content = %q, want %q", string(data), "# New") + } +} + +func TestPruneOrphanAgentCopies(t *testing.T) { + targetDir := t.TempDir() + + os.WriteFile(filepath.Join(targetDir, "active.md"), []byte("# Active"), 0644) + os.WriteFile(filepath.Join(targetDir, "orphan.md"), []byte("# Orphan"), 0644) + os.WriteFile(filepath.Join(targetDir, "README.md"), []byte("# Readme"), 0644) // conventional, skip + + agents := []resource.DiscoveredResource{ + {FlatName: "active.md"}, + } + + removed, err := PruneOrphanAgentCopies(targetDir, agents, false) + if err != nil { + t.Fatalf("PruneOrphanAgentCopies: %v", err) + } + + if len(removed) != 1 || removed[0] != "orphan.md" { + t.Errorf("expected [orphan.md] removed, got %v", removed) + } + + // README.md should still exist + if _, err := os.Stat(filepath.Join(targetDir, "README.md")); err != nil { + t.Error("README.md should not be removed") + } +} + +// --- Dispatch tests --- + +func TestSyncAgents_DefaultIsMerge(t *testing.T) { + sourceDir := t.TempDir() + targetDir := t.TempDir() + + os.WriteFile(filepath.Join(sourceDir, "a.md"), []byte("# A"), 0644) + + agents := []resource.DiscoveredResource{ + {FlatName: "a.md", AbsPath: filepath.Join(sourceDir, "a.md")}, + } + + result, err := SyncAgents(agents, sourceDir, targetDir, "", false, false) + if err != nil { + t.Fatalf("SyncAgents default: %v", err) + } + + if len(result.Linked) != 1 { + t.Errorf("expected 1 linked, got %d", len(result.Linked)) + } + + // Should be a symlink (merge mode) + info, _ := os.Lstat(filepath.Join(targetDir, "a.md")) + if info.Mode()&os.ModeSymlink == 0 { + t.Error("default mode should create symlinks (merge)") + } +} + +func TestCollectAgents_DryRun(t *testing.T) { + targetDir := t.TempDir() + os.WriteFile(filepath.Join(targetDir, "agent.md"), []byte("# Agent"), 0644) + + collectDir := t.TempDir() + collected, err := CollectAgents(targetDir, collectDir, true, nil) + if err != nil { + t.Fatalf("CollectAgents dry-run: %v", err) + } + + if len(collected) != 1 { + t.Fatalf("expected 1 collected in dry-run, got %d", len(collected)) + } + + // File should NOT exist (dry-run) + if _, err := os.Stat(filepath.Join(collectDir, "agent.md")); err == nil { + t.Error("file should not exist in dry-run") + } +} From ba1b03f5f99048045da7dd848aa14846baf0f6ee Mon Sep 17 00:00:00 2001 From: Willie Date: Sun, 29 Mar 2026 23:42:06 +0800 Subject: [PATCH 005/205] feat(install): add agent discovery, install flow, and --kind/--agent flags Extend DiscoveryResult with Agents field and auto-detection rules: - agents/ convention directory scanned for .md files - pure agent repo fallback (no SKILL.md, no agents/ dir, root .md files) - conventional excludes (README/LICENSE/CHANGELOG/SKILL.md) Add InstallAgentFromDiscovery() for single-file agent install with per-agent metadata (.skillshare-meta.json with kind: agent). Add Kind field to SkillMeta and InstallOptions. New CLI flags: - --kind : explicit resource kind selection - --agent/-a : select specific agents (implies --kind agent) Add AgentInfo struct, DiscoveryResult helper methods (HasAgents, HasSkills, IsMixed), and InstallOptions helpers (IsAgentMode, HasAgentFilter). --- cmd/skillshare/install.go | 17 ++++++ internal/install/install.go | 33 ++++++++++- internal/install/install_apply.go | 79 +++++++++++++++++++++++++ internal/install/install_discovery.go | 85 +++++++++++++++++++++++++++ internal/install/meta.go | 11 +++- 5 files changed, 223 insertions(+), 2 deletions(-) diff --git a/cmd/skillshare/install.go b/cmd/skillshare/install.go index 54137642..cf304bb5 100644 --- a/cmd/skillshare/install.go +++ b/cmd/skillshare/install.go @@ -97,6 +97,23 @@ func parseInstallArgs(args []string) (*installArgs, bool, error) { result.opts.Branch = branch case arg == "--track" || arg == "-t": result.opts.Track = true + case arg == "--kind": + if i+1 >= len(args) { + return nil, false, fmt.Errorf("--kind requires a value (skill or agent)") + } + i++ + kind := strings.ToLower(args[i]) + if kind != "skill" && kind != "agent" { + return nil, false, fmt.Errorf("--kind must be 'skill' or 'agent', got %q", args[i]) + } + result.opts.Kind = kind + case arg == "--agent" || arg == "-a": + if i+1 >= len(args) { + return nil, false, fmt.Errorf("-a requires agent name(s)") + } + i++ + result.opts.AgentNames = strings.Split(args[i], ",") + result.opts.Kind = "agent" case arg == "--skill" || arg == "-s": if i+1 >= len(args) { return nil, false, fmt.Errorf("--skill requires a value") diff --git a/internal/install/install.go b/internal/install/install.go index b368297d..098f2a19 100644 --- a/internal/install/install.go +++ b/internal/install/install.go @@ -10,12 +10,14 @@ import ( // InstallOptions configures the install behavior type InstallOptions struct { Name string // Override skill name + Kind string // "skill", "agent", or "" (auto-detect) Force bool // Overwrite existing DryRun bool // Preview only Update bool // Update existing installation Track bool // Install as tracked repository (preserves .git) OnProgress ProgressCallback Skills []string // Select specific skills from multi-skill repo (comma-separated) + AgentNames []string // Select specific agents from repo (comma-separated) Exclude []string // Skills to exclude from installation (comma-separated) All bool // Install all discovered skills without prompting Yes bool // Auto-accept all prompts (equivalent to --all for multi-skill repos) @@ -28,6 +30,12 @@ type InstallOptions struct { Branch string // Git branch to clone from (empty = remote default) } +// IsAgentMode returns true if explicitly installing agents. +func (o InstallOptions) IsAgentMode() bool { return o.Kind == "agent" } + +// HasAgentFilter returns true if specific agents were requested via -a flag. +func (o InstallOptions) HasAgentFilter() bool { return len(o.AgentNames) > 0 } + // ShouldInstallAll returns true if all discovered skills should be installed without prompting. func (o InstallOptions) ShouldInstallAll() bool { return o.All || o.Yes } @@ -55,15 +63,38 @@ type SkillInfo struct { Description string // Description from SKILL.md frontmatter (if any) } -// DiscoveryResult contains discovered skills from a repository +// AgentInfo represents a discovered agent (.md file) in a repository +type AgentInfo struct { + Name string // Agent name (filename without .md) + Path string // Relative path from repo root (e.g. "agents/tutor.md") + FileName string // Filename (e.g. "tutor.md") +} + +// DiscoveryResult contains discovered skills and agents from a repository type DiscoveryResult struct { RepoPath string // Temp directory where repo was cloned Skills []SkillInfo // Discovered skills + Agents []AgentInfo // Discovered agents Source *Source // Original source CommitHash string // Source commit hash when available Warnings []string // Non-fatal warnings during discovery } +// HasAgents reports whether the discovery found any agents. +func (d *DiscoveryResult) HasAgents() bool { + return len(d.Agents) > 0 +} + +// HasSkills reports whether the discovery found any skills. +func (d *DiscoveryResult) HasSkills() bool { + return len(d.Skills) > 0 +} + +// IsMixed reports whether the discovery found both skills and agents. +func (d *DiscoveryResult) IsMixed() bool { + return d.HasSkills() && d.HasAgents() +} + // TrackedRepoResult reports the outcome of a tracked repo installation type TrackedRepoResult struct { RepoName string // Name of the tracked repo (e.g., "_team-skills") diff --git a/internal/install/install_apply.go b/internal/install/install_apply.go index 4d4c449b..805b7e80 100644 --- a/internal/install/install_apply.go +++ b/internal/install/install_apply.go @@ -1,9 +1,12 @@ package install import ( + "encoding/json" "fmt" "os" "path/filepath" + + "skillshare/internal/utils" ) // buildDiscoverySkillSource constructs metadata Source string for a skill @@ -381,6 +384,82 @@ func checkSkillFile(skillPath string, result *InstallResult) { } } +// InstallAgentFromDiscovery installs a single agent (.md file) from a discovery result. +// Unlike skill install (directory copy), agent install copies a single file. +func InstallAgentFromDiscovery(discovery *DiscoveryResult, agent AgentInfo, destDir string, opts InstallOptions) (*InstallResult, error) { + result := &InstallResult{ + SkillName: agent.Name, + Source: buildDiscoverySkillSource(discovery.Source, agent.Path), + } + + destFile := filepath.Join(destDir, agent.FileName) + result.SkillPath = destFile + + if opts.DryRun { + result.Action = "would install" + return result, nil + } + + if err := os.MkdirAll(destDir, 0755); err != nil { + return nil, fmt.Errorf("failed to create agents directory: %w", err) + } + + // Check if file exists + if _, err := os.Stat(destFile); err == nil && !opts.Force { + result.Action = "skipped" + result.Warnings = append(result.Warnings, "agent already exists (use --force to overwrite)") + return result, nil + } + + // Determine source path in temp repo + var srcPath string + if discovery.Source.HasSubdir() { + srcPath = filepath.Join(discovery.RepoPath, "repo", discovery.Source.Subdir, agent.Path) + } else { + srcPath = filepath.Join(discovery.RepoPath, "repo", agent.Path) + } + + data, err := os.ReadFile(srcPath) + if err != nil { + return nil, fmt.Errorf("failed to read agent %s: %w", agent.FileName, err) + } + + if err := os.WriteFile(destFile, data, 0644); err != nil { + return nil, fmt.Errorf("failed to write agent %s: %w", agent.FileName, err) + } + + // Write metadata alongside the agent file (as .skillshare-meta.json) + source := &Source{ + Type: discovery.Source.Type, + Raw: result.Source, + CloneURL: discovery.Source.CloneURL, + Subdir: agent.Path, + Name: agent.Name, + } + meta := NewMetaFromSource(source) + meta.Kind = "agent" + if discovery.CommitHash != "" { + meta.Version = discovery.CommitHash + } + // For agents, file_hashes is just the single file + if hash, hashErr := computeSingleFileHash(destFile); hashErr == nil { + meta.FileHashes = map[string]string{agent.FileName: hash} + } + + metaPath := filepath.Join(destDir, agent.Name+".skillshare-meta.json") + if metaData, marshalErr := json.MarshalIndent(meta, "", " "); marshalErr == nil { + os.WriteFile(metaPath, metaData, 0644) + } + + result.Action = "installed" + return result, nil +} + +// computeSingleFileHash computes the sha256 hash for a single file. +func computeSingleFileHash(filePath string) (string, error) { + return utils.FileHashFormatted(filePath) +} + // auditInstalledSkill scans the installed skill for security threats. // It blocks installation when findings are at or above configured threshold // unless force is enabled. diff --git a/internal/install/install_discovery.go b/internal/install/install_discovery.go index 7cf7cee7..8a9736cc 100644 --- a/internal/install/install_discovery.go +++ b/internal/install/install_discovery.go @@ -43,11 +43,15 @@ func discoverFromGitWithProgressImpl(source *Source, onProgress ProgressCallback } } + // Discover agents (agents/ dir or pure agent repo fallback) + agents := discoverAgents(repoPath, len(skills) > 0) + commitHash, _ := getGitCommit(repoPath) return &DiscoveryResult{ RepoPath: tempDir, Skills: skills, + Agents: agents, Source: source, CommitHash: commitHash, }, nil @@ -162,6 +166,87 @@ func discoverSkills(repoPath string, includeRoot bool) []SkillInfo { return skills } +// discoverAgents finds .md files in an agents/ convention directory. +// Also detects "pure agent repos" — repos with no SKILL.md and no agents/ dir +// but with .md files at root (per D5 rule 4). +func discoverAgents(repoPath string, hasSkills bool) []AgentInfo { + var agents []AgentInfo + + // Rule 2: Check agents/ convention directory + agentsDir := filepath.Join(repoPath, "agents") + if info, err := os.Stat(agentsDir); err == nil && info.IsDir() { + agents = append(agents, scanAgentDir(repoPath, agentsDir)...) + return agents + } + + // Rule 4: Pure agent repo fallback — no skills, no agents/ dir, root has .md files + if !hasSkills { + agents = append(agents, scanAgentDir(repoPath, repoPath)...) + } + + return agents +} + +// scanAgentDir scans a directory for .md agent files, excluding conventional files. +func scanAgentDir(repoRoot, dir string) []AgentInfo { + var agents []AgentInfo + + filepath.Walk(dir, func(path string, info os.FileInfo, err error) error { + if err != nil { + return nil + } + + if info.IsDir() { + name := info.Name() + if name == ".git" || TargetDotDirs[name] { + return filepath.SkipDir + } + return nil + } + + if !strings.HasSuffix(strings.ToLower(info.Name()), ".md") { + return nil + } + + // Skip conventional excludes + if conventionalAgentExcludes[info.Name()] { + return nil + } + + // Skip hidden files + if strings.HasPrefix(info.Name(), ".") { + return nil + } + + relPath, relErr := filepath.Rel(repoRoot, path) + if relErr != nil { + return nil + } + relPath = strings.ReplaceAll(relPath, "\\", "/") + + name := strings.TrimSuffix(info.Name(), ".md") + + agents = append(agents, AgentInfo{ + Name: name, + Path: relPath, + FileName: info.Name(), + }) + + return nil + }) + + return agents +} + +var conventionalAgentExcludes = map[string]bool{ + "README.md": true, + "CHANGELOG.md": true, + "LICENSE.md": true, + "HISTORY.md": true, + "SECURITY.md": true, + "SKILL.md": true, +} + // DiscoverFromGitSubdir clones a repo and discovers skills within a subdirectory // Unlike DiscoverFromGit, this includes root-level SKILL.md of the subdir diff --git a/internal/install/meta.go b/internal/install/meta.go index fb7713f9..d577d4d0 100644 --- a/internal/install/meta.go +++ b/internal/install/meta.go @@ -14,9 +14,10 @@ import ( // MetaFileName is the name of the skillshare metadata file stored in each skill directory. const MetaFileName = ".skillshare-meta.json" -// SkillMeta contains metadata about an installed skill +// SkillMeta contains metadata about an installed skill or agent type SkillMeta struct { Source string `json:"source"` // Original source input + Kind string `json:"kind,omitempty"` // "skill" (default/empty) or "agent" Type string `json:"type"` // Source type (github, local, etc.) InstalledAt time.Time `json:"installed_at"` // Installation timestamp RepoURL string `json:"repo_url,omitempty"` // Git repo URL (for git sources) @@ -27,6 +28,14 @@ type SkillMeta struct { Branch string `json:"branch,omitempty"` // Git branch (when non-default) } +// EffectiveKind returns "skill" if Kind is empty, otherwise the Kind value. +func (m *SkillMeta) EffectiveKind() string { + if m.Kind == "" { + return "skill" + } + return m.Kind +} + // WriteMeta saves metadata to the skill directory func WriteMeta(skillPath string, meta *SkillMeta) error { metaPath := filepath.Join(skillPath, MetaFileName) From 949fd444e040452fa9d9cee7bc947054973d6a86 Mon Sep 17 00:00:00 2001 From: Willie Date: Sun, 29 Mar 2026 23:58:38 +0800 Subject: [PATCH 006/205] feat(check,trash): add agent drift detection and agent trash support Add CheckAgents() in internal/check/ that scans agents source directory, reads per-agent metadata (.skillshare-meta.json), and compares file hashes to detect drift. Statuses: up_to_date, drifted, local, error. Add agent trash functions in internal/trash/: - AgentTrashDir() / ProjectAgentTrashDir() for agent-specific trash paths - MoveAgentToTrash() moves agent .md file + metadata to timestamped trash subdirectory with rename-or-copy-then-delete fallback 6 unit tests for agent check covering empty dir, local agent, hash match, drift detection, nonexistent dir, and non-md filtering. --- internal/check/agent_check.go | 89 +++++++++++++++++++++++++ internal/check/agent_check_test.go | 100 +++++++++++++++++++++++++++++ internal/trash/trash.go | 48 ++++++++++++++ 3 files changed, 237 insertions(+) create mode 100644 internal/check/agent_check.go create mode 100644 internal/check/agent_check_test.go diff --git a/internal/check/agent_check.go b/internal/check/agent_check.go new file mode 100644 index 00000000..77d13112 --- /dev/null +++ b/internal/check/agent_check.go @@ -0,0 +1,89 @@ +package check + +import ( + "encoding/json" + "os" + "path/filepath" + "strings" + + "skillshare/internal/install" + "skillshare/internal/utils" +) + +// AgentCheckResult holds the check result for a single agent. +type AgentCheckResult struct { + Name string `json:"name"` + Source string `json:"source,omitempty"` + Version string `json:"version,omitempty"` + Status string `json:"status"` // "up_to_date", "drifted", "local", "error" + Message string `json:"message,omitempty"` +} + +// CheckAgents scans the agents source directory for installed agents and +// compares their file hashes against metadata to detect drift. +func CheckAgents(agentsDir string) []AgentCheckResult { + entries, err := os.ReadDir(agentsDir) + if err != nil { + return nil + } + + var results []AgentCheckResult + + for _, entry := range entries { + name := entry.Name() + + // Agent .md files + if !entry.IsDir() && strings.HasSuffix(strings.ToLower(name), ".md") { + agentName := strings.TrimSuffix(name, ".md") + result := checkOneAgent(agentsDir, agentName, name) + results = append(results, result) + } + } + + return results +} + +func checkOneAgent(agentsDir, agentName, fileName string) AgentCheckResult { + result := AgentCheckResult{Name: agentName} + + // Look for metadata file: .skillshare-meta.json + metaPath := filepath.Join(agentsDir, agentName+".skillshare-meta.json") + metaData, err := os.ReadFile(metaPath) + if err != nil { + result.Status = "local" + return result + } + + var meta install.SkillMeta + if err := json.Unmarshal(metaData, &meta); err != nil { + result.Status = "error" + result.Message = "invalid metadata" + return result + } + + result.Source = meta.Source + result.Version = meta.Version + + // Compare file hash + agentPath := filepath.Join(agentsDir, fileName) + if meta.FileHashes == nil || meta.FileHashes[fileName] == "" { + result.Status = "local" + return result + } + + currentHash, err := utils.FileHashFormatted(agentPath) + if err != nil { + result.Status = "error" + result.Message = "cannot hash file" + return result + } + + if currentHash == meta.FileHashes[fileName] { + result.Status = "up_to_date" + } else { + result.Status = "drifted" + result.Message = "file content changed since install" + } + + return result +} diff --git a/internal/check/agent_check_test.go b/internal/check/agent_check_test.go new file mode 100644 index 00000000..2990d4e8 --- /dev/null +++ b/internal/check/agent_check_test.go @@ -0,0 +1,100 @@ +package check + +import ( + "encoding/json" + "os" + "path/filepath" + "testing" + + "skillshare/internal/install" + "skillshare/internal/utils" +) + +func TestCheckAgents_NoAgents(t *testing.T) { + dir := t.TempDir() + results := CheckAgents(dir) + if len(results) != 0 { + t.Errorf("expected 0 results, got %d", len(results)) + } +} + +func TestCheckAgents_LocalAgent(t *testing.T) { + dir := t.TempDir() + os.WriteFile(filepath.Join(dir, "tutor.md"), []byte("# Tutor"), 0644) + + results := CheckAgents(dir) + if len(results) != 1 { + t.Fatalf("expected 1 result, got %d", len(results)) + } + if results[0].Name != "tutor" { + t.Errorf("Name = %q, want %q", results[0].Name, "tutor") + } + if results[0].Status != "local" { + t.Errorf("Status = %q, want %q", results[0].Status, "local") + } +} + +func TestCheckAgents_UpToDate(t *testing.T) { + dir := t.TempDir() + agentFile := filepath.Join(dir, "tutor.md") + os.WriteFile(agentFile, []byte("# Tutor agent"), 0644) + + hash, _ := utils.FileHashFormatted(agentFile) + + meta := &install.SkillMeta{ + Source: "test", + Kind: "agent", + FileHashes: map[string]string{"tutor.md": hash}, + } + metaData, _ := json.MarshalIndent(meta, "", " ") + os.WriteFile(filepath.Join(dir, "tutor.skillshare-meta.json"), metaData, 0644) + + results := CheckAgents(dir) + if len(results) != 1 { + t.Fatalf("expected 1 result, got %d", len(results)) + } + if results[0].Status != "up_to_date" { + t.Errorf("Status = %q, want %q", results[0].Status, "up_to_date") + } +} + +func TestCheckAgents_Drifted(t *testing.T) { + dir := t.TempDir() + agentFile := filepath.Join(dir, "tutor.md") + os.WriteFile(agentFile, []byte("# Modified content"), 0644) + + meta := &install.SkillMeta{ + Source: "test", + Kind: "agent", + FileHashes: map[string]string{"tutor.md": "sha256:0000000000000000000000000000000000000000000000000000000000000000"}, + } + metaData, _ := json.MarshalIndent(meta, "", " ") + os.WriteFile(filepath.Join(dir, "tutor.skillshare-meta.json"), metaData, 0644) + + results := CheckAgents(dir) + if len(results) != 1 { + t.Fatalf("expected 1 result, got %d", len(results)) + } + if results[0].Status != "drifted" { + t.Errorf("Status = %q, want %q", results[0].Status, "drifted") + } +} + +func TestCheckAgents_NonExistentDir(t *testing.T) { + results := CheckAgents("/nonexistent/path") + if results != nil { + t.Errorf("expected nil for nonexistent dir, got %v", results) + } +} + +func TestCheckAgents_SkipsNonMd(t *testing.T) { + dir := t.TempDir() + os.WriteFile(filepath.Join(dir, "tutor.md"), []byte("# Tutor"), 0644) + os.WriteFile(filepath.Join(dir, "config.yaml"), []byte("key: val"), 0644) + os.MkdirAll(filepath.Join(dir, "subdir"), 0755) + + results := CheckAgents(dir) + if len(results) != 1 { + t.Fatalf("expected 1 result (only .md files), got %d", len(results)) + } +} diff --git a/internal/trash/trash.go b/internal/trash/trash.go index 11d31463..fde6e651 100644 --- a/internal/trash/trash.go +++ b/internal/trash/trash.go @@ -24,6 +24,54 @@ func ProjectTrashDir(root string) string { return filepath.Join(root, ".skillshare", "trash") } +// AgentTrashDir returns the global trash directory for agents. +func AgentTrashDir() string { + return filepath.Join(config.DataDir(), "trash", "agents") +} + +// ProjectAgentTrashDir returns the project-level trash directory for agents. +func ProjectAgentTrashDir(root string) string { + return filepath.Join(root, ".skillshare", "trash", "agents") +} + +// MoveAgentToTrash moves an agent file (and its metadata) to the trash directory. +func MoveAgentToTrash(agentFile, metaFile, name, trashBase string) (string, error) { + timestamp := time.Now().Format("2006-01-02_15-04-05") + trashDir := filepath.Join(trashBase, name+"_"+timestamp) + + if err := os.MkdirAll(trashDir, 0755); err != nil { + return "", fmt.Errorf("failed to create agent trash dir: %w", err) + } + + // Move agent .md file + destFile := filepath.Join(trashDir, filepath.Base(agentFile)) + if err := os.Rename(agentFile, destFile); err != nil { + // Fallback: copy then delete + data, readErr := os.ReadFile(agentFile) + if readErr != nil { + return "", fmt.Errorf("failed to read agent for trash: %w", readErr) + } + if writeErr := os.WriteFile(destFile, data, 0644); writeErr != nil { + return "", fmt.Errorf("failed to write agent to trash: %w", writeErr) + } + os.Remove(agentFile) + } + + // Move metadata if exists + if metaFile != "" { + if _, err := os.Stat(metaFile); err == nil { + destMeta := filepath.Join(trashDir, filepath.Base(metaFile)) + if err := os.Rename(metaFile, destMeta); err != nil { + data, _ := os.ReadFile(metaFile) + os.WriteFile(destMeta, data, 0644) + os.Remove(metaFile) + } + } + } + + return trashDir, nil +} + // TrashEntry holds information about a trashed item. type TrashEntry struct { Name string // Original skill name From e2101ab5e9cbdea814e330c8d3afe69378abd407 Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 30 Mar 2026 00:01:07 +0800 Subject: [PATCH 007/205] feat(cli): add resource kind filter parsing for CLI commands Add kind_filter.go with two parsing strategies: - parseKindArg(): positional syntax (list agents, sync skills) - parseKindFlag(): flag syntax (uninstall --kind agent tutor) resourceKindFilter type with IncludesSkills()/IncludesAgents() helpers for handler-level routing. Supports singular and plural forms (skill/skills, agent/agents). 16 test cases covering both parsers and filter logic. --- cmd/skillshare/kind_filter.go | 78 ++++++++++++++++++++++++++++++ cmd/skillshare/kind_filter_test.go | 76 +++++++++++++++++++++++++++++ 2 files changed, 154 insertions(+) create mode 100644 cmd/skillshare/kind_filter.go create mode 100644 cmd/skillshare/kind_filter_test.go diff --git a/cmd/skillshare/kind_filter.go b/cmd/skillshare/kind_filter.go new file mode 100644 index 00000000..ec44159f --- /dev/null +++ b/cmd/skillshare/kind_filter.go @@ -0,0 +1,78 @@ +package main + +import "fmt" + +// resourceKindFilter represents the kind filtering for CLI commands. +type resourceKindFilter int + +const ( + kindAll resourceKindFilter = iota // no filter — all kinds + kindSkills // skills only + kindAgents // agents only +) + +// parseKindArg extracts a kind filter from the first positional argument. +// Returns the filter and remaining args. +// Recognized values: "skills", "skill", "agents", "agent". +// If the first arg is not a kind keyword, returns kindAll with args unchanged. +func parseKindArg(args []string) (resourceKindFilter, []string) { + if len(args) == 0 { + return kindAll, args + } + + switch args[0] { + case "skills", "skill": + return kindSkills, args[1:] + case "agents", "agent": + return kindAgents, args[1:] + default: + return kindAll, args + } +} + +// parseKindFlag extracts --kind flag from args. +// Returns the filter and remaining args with --kind removed. +func parseKindFlag(args []string) (resourceKindFilter, []string, error) { + kind := kindAll + rest := make([]string, 0, len(args)) + + for i := 0; i < len(args); i++ { + if args[i] == "--kind" { + if i+1 >= len(args) { + return kindAll, nil, fmt.Errorf("--kind requires a value (skill or agent)") + } + i++ + switch args[i] { + case "skill", "skills": + kind = kindSkills + case "agent", "agents": + kind = kindAgents + default: + return kindAll, nil, fmt.Errorf("--kind must be 'skill' or 'agent', got %q", args[i]) + } + } else { + rest = append(rest, args[i]) + } + } + + return kind, rest, nil +} + +func (k resourceKindFilter) String() string { + switch k { + case kindSkills: + return "skills" + case kindAgents: + return "agents" + default: + return "all" + } +} + +func (k resourceKindFilter) IncludesSkills() bool { + return k == kindAll || k == kindSkills +} + +func (k resourceKindFilter) IncludesAgents() bool { + return k == kindAll || k == kindAgents +} diff --git a/cmd/skillshare/kind_filter_test.go b/cmd/skillshare/kind_filter_test.go new file mode 100644 index 00000000..3a9fb511 --- /dev/null +++ b/cmd/skillshare/kind_filter_test.go @@ -0,0 +1,76 @@ +package main + +import "testing" + +func TestParseKindArg(t *testing.T) { + tests := []struct { + args []string + wantKind resourceKindFilter + wantRest []string + }{ + {nil, kindAll, nil}, + {[]string{}, kindAll, []string{}}, + {[]string{"skills"}, kindSkills, []string{}}, + {[]string{"skill"}, kindSkills, []string{}}, + {[]string{"agents"}, kindAgents, []string{}}, + {[]string{"agent"}, kindAgents, []string{}}, + {[]string{"agents", "tutor"}, kindAgents, []string{"tutor"}}, + {[]string{"--json"}, kindAll, []string{"--json"}}, + {[]string{"my-skill"}, kindAll, []string{"my-skill"}}, + } + + for _, tt := range tests { + kind, rest := parseKindArg(tt.args) + if kind != tt.wantKind { + t.Errorf("parseKindArg(%v) kind = %v, want %v", tt.args, kind, tt.wantKind) + } + if len(rest) != len(tt.wantRest) { + t.Errorf("parseKindArg(%v) rest = %v, want %v", tt.args, rest, tt.wantRest) + } + } +} + +func TestParseKindFlag(t *testing.T) { + tests := []struct { + args []string + wantKind resourceKindFilter + wantRest []string + wantErr bool + }{ + {[]string{}, kindAll, []string{}, false}, + {[]string{"--kind", "agent"}, kindAgents, []string{}, false}, + {[]string{"--kind", "skill"}, kindSkills, []string{}, false}, + {[]string{"--json", "--kind", "agent", "foo"}, kindAgents, []string{"--json", "foo"}, false}, + {[]string{"--kind"}, kindAll, nil, true}, + {[]string{"--kind", "invalid"}, kindAll, nil, true}, + } + + for _, tt := range tests { + kind, rest, err := parseKindFlag(tt.args) + if (err != nil) != tt.wantErr { + t.Errorf("parseKindFlag(%v) err = %v, wantErr %v", tt.args, err, tt.wantErr) + continue + } + if err != nil { + continue + } + if kind != tt.wantKind { + t.Errorf("parseKindFlag(%v) kind = %v, want %v", tt.args, kind, tt.wantKind) + } + if len(rest) != len(tt.wantRest) { + t.Errorf("parseKindFlag(%v) rest = %v, want %v", tt.args, rest, tt.wantRest) + } + } +} + +func TestResourceKindFilter_Includes(t *testing.T) { + if !kindAll.IncludesSkills() || !kindAll.IncludesAgents() { + t.Error("kindAll should include both") + } + if !kindSkills.IncludesSkills() || kindSkills.IncludesAgents() { + t.Error("kindSkills should include only skills") + } + if kindAgents.IncludesSkills() || !kindAgents.IncludesAgents() { + t.Error("kindAgents should include only agents") + } +} From 12f8c52a11413633f44a4ae55b9aab34a24d5ebd Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 30 Mar 2026 00:05:10 +0800 Subject: [PATCH 008/205] feat: add .agentignore support and agent file validation Add ReadAgentIgnoreMatcher() in skillignore package to read .agentignore and .agentignore.local files using the same gitignore-style pattern format as .skillignore. Integrate .agentignore filtering into AgentKind.Discover() with both directory-level CanSkipDir() and file-level Match() checks. Add agent validation in internal/validate/: - AgentFile(): extension check, filename restrictions (no spaces/special chars), file size warning (>100KB), existence and type checks - AgentName(): name format validation (alphanumeric + hyphens/underscores) 9 new tests across resource and validate packages. --- internal/resource/agent.go | 19 ++++++ internal/resource/kind_test.go | 23 ++++++++ internal/skillignore/skillignore.go | 25 ++++++++ internal/validate/agent.go | 84 +++++++++++++++++++++++++++ internal/validate/agent_test.go | 89 +++++++++++++++++++++++++++++ 5 files changed, 240 insertions(+) create mode 100644 internal/validate/agent.go create mode 100644 internal/validate/agent_test.go diff --git a/internal/resource/agent.go b/internal/resource/agent.go index 689ac895..994c773a 100644 --- a/internal/resource/agent.go +++ b/internal/resource/agent.go @@ -5,6 +5,7 @@ import ( "path/filepath" "strings" + "skillshare/internal/skillignore" "skillshare/internal/utils" ) @@ -20,6 +21,9 @@ func (AgentKind) Kind() string { return "agent" } func (AgentKind) Discover(sourceDir string) ([]DiscoveredResource, error) { walkRoot := utils.ResolveSymlink(sourceDir) + // Read .agentignore for filtering + ignoreMatcher := skillignore.ReadAgentIgnoreMatcher(walkRoot) + var resources []DiscoveredResource err := filepath.Walk(walkRoot, func(path string, info os.FileInfo, err error) error { @@ -31,6 +35,16 @@ func (AgentKind) Discover(sourceDir string) ([]DiscoveredResource, error) { if info.Name() == ".git" || utils.IsHidden(info.Name()) && info.Name() != "." { return filepath.SkipDir } + // Skip ignored directories early + if ignoreMatcher.HasRules() && info.Name() != "." { + relDir, relErr := filepath.Rel(walkRoot, path) + if relErr == nil { + relDir = strings.ReplaceAll(relDir, "\\", "/") + if ignoreMatcher.CanSkipDir(relDir) { + return filepath.SkipDir + } + } + } return nil } @@ -55,6 +69,11 @@ func (AgentKind) Discover(sourceDir string) ([]DiscoveredResource, error) { } relPath = strings.ReplaceAll(relPath, "\\", "/") + // Apply .agentignore matching + if ignoreMatcher.HasRules() && ignoreMatcher.Match(relPath, false) { + return nil + } + name := agentNameFromFile(path, info.Name()) isNested := strings.Contains(relPath, "/") diff --git a/internal/resource/kind_test.go b/internal/resource/kind_test.go index a3ceb43e..b46f4121 100644 --- a/internal/resource/kind_test.go +++ b/internal/resource/kind_test.go @@ -298,6 +298,29 @@ func TestAgentKind_Discover_EmptyDir(t *testing.T) { } } +func TestAgentKind_Discover_RespectsAgentignore(t *testing.T) { + dir := t.TempDir() + + os.WriteFile(filepath.Join(dir, "active.md"), []byte("# Active"), 0o644) + os.WriteFile(filepath.Join(dir, "ignored.md"), []byte("# Ignored"), 0o644) + + // Create .agentignore + os.WriteFile(filepath.Join(dir, ".agentignore"), []byte("ignored.md\n"), 0o644) + + k := AgentKind{} + resources, err := k.Discover(dir) + if err != nil { + t.Fatalf("Discover error: %v", err) + } + + if len(resources) != 1 { + t.Fatalf("expected 1 resource (ignored filtered out), got %d", len(resources)) + } + if resources[0].Name != "active" { + t.Errorf("Name = %q, want %q", resources[0].Name, "active") + } +} + func TestAgentKind_Discover_SkipsGitDir(t *testing.T) { dir := t.TempDir() diff --git a/internal/skillignore/skillignore.go b/internal/skillignore/skillignore.go index 63bb4059..2ef59d13 100644 --- a/internal/skillignore/skillignore.go +++ b/internal/skillignore/skillignore.go @@ -135,6 +135,31 @@ func ReadMatcher(dir string) *Matcher { return m } +// ReadAgentIgnoreMatcher reads .agentignore (and .agentignore.local) from dir +// and returns a compiled Matcher. Same gitignore-style pattern format as .skillignore. +func ReadAgentIgnoreMatcher(dir string) *Matcher { + var lines []string + var hasLocal bool + + data, err := os.ReadFile(filepath.Join(dir, ".agentignore")) + if err == nil { + lines = strings.Split(string(data), "\n") + } + + localData, localErr := os.ReadFile(filepath.Join(dir, ".agentignore.local")) + if localErr == nil { + hasLocal = true + lines = append(lines, strings.Split(string(localData), "\n")...) + } + + if len(lines) == 0 { + return &Matcher{} + } + m := Compile(lines) + m.HasLocal = hasLocal + return m +} + // HasRules reports whether the matcher has any compiled rules. func (m *Matcher) HasRules() bool { return m != nil && len(m.rules) > 0 diff --git a/internal/validate/agent.go b/internal/validate/agent.go new file mode 100644 index 00000000..ea824fb7 --- /dev/null +++ b/internal/validate/agent.go @@ -0,0 +1,84 @@ +package validate + +import ( + "fmt" + "os" + "path/filepath" + "regexp" + "strings" +) + +// agentFileNameRegex allows letters, numbers, underscores, hyphens, and dots. +// Must end with .md (case-insensitive checked separately). +var agentFileNameRegex = regexp.MustCompile(`^[a-zA-Z0-9][a-zA-Z0-9_.-]*$`) + +// AgentFileSizeWarningThreshold is the size above which a warning is issued. +const AgentFileSizeWarningThreshold = 100 * 1024 // 100KB + +// AgentValidationResult holds the result of validating an agent file. +type AgentValidationResult struct { + Valid bool + Errors []string + Warnings []string +} + +// AgentFile validates a single agent .md file. +func AgentFile(filePath string) AgentValidationResult { + result := AgentValidationResult{Valid: true} + + fileName := filepath.Base(filePath) + + // Must be .md extension + if !strings.HasSuffix(strings.ToLower(fileName), ".md") { + result.Valid = false + result.Errors = append(result.Errors, fmt.Sprintf("agent file must have .md extension, got %q", fileName)) + return result + } + + // Filename restrictions: no spaces or special chars + if !agentFileNameRegex.MatchString(fileName) { + result.Valid = false + result.Errors = append(result.Errors, fmt.Sprintf("agent filename %q contains invalid characters (spaces, special chars not allowed)", fileName)) + } + + // Check file exists and size + info, err := os.Stat(filePath) + if err != nil { + if os.IsNotExist(err) { + result.Valid = false + result.Errors = append(result.Errors, "agent file does not exist") + } + return result + } + + if info.IsDir() { + result.Valid = false + result.Errors = append(result.Errors, "path is a directory, not a file") + return result + } + + if info.Size() > int64(AgentFileSizeWarningThreshold) { + result.Warnings = append(result.Warnings, + fmt.Sprintf("agent file is %dKB (>100KB) — large agents may slow down AI tools", info.Size()/1024)) + } + + return result +} + +// AgentName validates an agent name (derived from filename). +func AgentName(name string) error { + if name == "" { + return fmt.Errorf("agent name cannot be empty") + } + + if len(name) > 128 { + return fmt.Errorf("agent name too long (max 128 characters)") + } + + nameRegex := regexp.MustCompile(`^[a-zA-Z0-9][a-zA-Z0-9_-]*$`) + if !nameRegex.MatchString(name) { + return fmt.Errorf("agent name must start with a letter or number and contain only letters, numbers, underscores, and hyphens") + } + + return nil +} diff --git a/internal/validate/agent_test.go b/internal/validate/agent_test.go new file mode 100644 index 00000000..c25a8611 --- /dev/null +++ b/internal/validate/agent_test.go @@ -0,0 +1,89 @@ +package validate + +import ( + "os" + "path/filepath" + "strings" + "testing" +) + +func TestAgentFile_Valid(t *testing.T) { + dir := t.TempDir() + f := filepath.Join(dir, "tutor.md") + os.WriteFile(f, []byte("# Tutor"), 0644) + + r := AgentFile(f) + if !r.Valid { + t.Errorf("expected valid, got errors: %v", r.Errors) + } + if len(r.Warnings) != 0 { + t.Errorf("expected no warnings, got: %v", r.Warnings) + } +} + +func TestAgentFile_WrongExtension(t *testing.T) { + dir := t.TempDir() + f := filepath.Join(dir, "tutor.txt") + os.WriteFile(f, []byte("content"), 0644) + + r := AgentFile(f) + if r.Valid { + t.Error("expected invalid for non-.md file") + } +} + +func TestAgentFile_InvalidFilename(t *testing.T) { + dir := t.TempDir() + f := filepath.Join(dir, "my agent.md") + os.WriteFile(f, []byte("# Agent"), 0644) + + r := AgentFile(f) + if r.Valid { + t.Error("expected invalid for filename with spaces") + } +} + +func TestAgentFile_Oversized(t *testing.T) { + dir := t.TempDir() + f := filepath.Join(dir, "big.md") + os.WriteFile(f, []byte(strings.Repeat("x", 200*1024)), 0644) + + r := AgentFile(f) + if !r.Valid { + t.Error("oversized file should be valid (warning only)") + } + if len(r.Warnings) != 1 { + t.Errorf("expected 1 warning, got %d", len(r.Warnings)) + } +} + +func TestAgentFile_NotExist(t *testing.T) { + r := AgentFile("/nonexistent/agent.md") + if r.Valid { + t.Error("expected invalid for nonexistent file") + } +} + +func TestAgentFile_Directory(t *testing.T) { + dir := t.TempDir() + r := AgentFile(dir) + if r.Valid { + t.Error("expected invalid for directory path") + } +} + +func TestAgentName_Valid(t *testing.T) { + for _, name := range []string{"tutor", "math-tutor", "code_review", "a1"} { + if err := AgentName(name); err != nil { + t.Errorf("AgentName(%q) should be valid, got: %v", name, err) + } + } +} + +func TestAgentName_Invalid(t *testing.T) { + for _, name := range []string{"", "-start", "has space", strings.Repeat("a", 129)} { + if err := AgentName(name); err == nil { + t.Errorf("AgentName(%q) should be invalid", name) + } + } +} From 6f2932c650529f7ac18b6d970b86f3a5a5813e96 Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 30 Mar 2026 00:10:17 +0800 Subject: [PATCH 009/205] feat(api): add agent support to overview, skills, and targets endpoints Add agentsSource() helper to Server for mode-aware agent directory resolution (global: EffectiveAgentsSource, project: .skillshare/agents). handler_overview: add agentCount field counting .md files in agents dir. handler_skills: add kind field ("skill"/"agent") to skillItem response. Support ?kind= query param filter. List agents from agents source dir alongside skills, with metadata enrichment from per-agent meta files. handler_targets: add agentPath, agentLinkedCount, agentExpectedCount fields to targetItem for frontend agent path display. --- internal/server/handler_overview.go | 13 +++++ internal/server/handler_skills.go | 90 +++++++++++++++++++++-------- internal/server/handler_targets.go | 3 + internal/server/server.go | 9 +++ 4 files changed, 90 insertions(+), 25 deletions(-) diff --git a/internal/server/handler_overview.go b/internal/server/handler_overview.go index 5fd86120..0a7da3ad 100644 --- a/internal/server/handler_overview.go +++ b/internal/server/handler_overview.go @@ -23,6 +23,7 @@ func (s *Server) handleOverview(w http.ResponseWriter, r *http.Request) { // Snapshot config under RLock, then release before I/O. s.mu.RLock() source := s.cfg.Source + agentsSource := s.agentsSource() cfgMode := s.cfg.Mode targetCount := len(s.cfg.Targets) projectRoot := s.projectRoot @@ -54,9 +55,21 @@ func (s *Server) handleOverview(w http.ResponseWriter, r *http.Request) { // Tracked repos trackedRepos := buildTrackedRepos(source, skills) + // Count agents + agentCount := 0 + if agentsSource != "" { + agentEntries, _ := os.ReadDir(agentsSource) + for _, e := range agentEntries { + if !e.IsDir() && strings.HasSuffix(strings.ToLower(e.Name()), ".md") { + agentCount++ + } + } + } + resp := map[string]any{ "source": source, "skillCount": len(skills), + "agentCount": agentCount, "topLevelCount": topLevelCount, "targetCount": targetCount, "mode": mode, diff --git a/internal/server/handler_skills.go b/internal/server/handler_skills.go index 26b89b3b..3e96ff20 100644 --- a/internal/server/handler_skills.go +++ b/internal/server/handler_skills.go @@ -19,6 +19,7 @@ import ( type skillItem struct { Name string `json:"name"` + Kind string `json:"kind"` // "skill" or "agent" FlatName string `json:"flatName"` RelPath string `json:"relPath"` SourcePath string `json:"sourcePath"` @@ -44,41 +45,80 @@ func enrichSkillBranch(item *skillItem) { } func (s *Server) handleListSkills(w http.ResponseWriter, r *http.Request) { + kindFilter := r.URL.Query().Get("kind") // "", "skill", "agent" + // Snapshot config under RLock, then release before I/O. s.mu.RLock() source := s.cfg.Source + agentsSource := s.agentsSource() s.mu.RUnlock() - discovered, err := sync.DiscoverSourceSkillsAll(source) - if err != nil { - writeError(w, http.StatusInternalServerError, err.Error()) - return - } + var items []skillItem - items := make([]skillItem, 0, len(discovered)) - for _, d := range discovered { - item := skillItem{ - Name: filepath.Base(d.SourcePath), - FlatName: d.FlatName, - RelPath: d.RelPath, - SourcePath: d.SourcePath, - IsInRepo: d.IsInRepo, - Targets: d.Targets, - Disabled: d.Disabled, + // Skills + if kindFilter == "" || kindFilter == "skill" { + discovered, err := sync.DiscoverSourceSkillsAll(source) + if err != nil { + writeError(w, http.StatusInternalServerError, err.Error()) + return } - // Enrich with metadata if available - if meta, _ := install.ReadMeta(d.SourcePath); meta != nil { - item.InstalledAt = meta.InstalledAt.Format("2006-01-02T15:04:05Z") - item.Source = meta.Source - item.Type = meta.Type - item.RepoURL = meta.RepoURL - item.Version = meta.Version - item.Branch = meta.Branch + for _, d := range discovered { + item := skillItem{ + Name: filepath.Base(d.SourcePath), + Kind: "skill", + FlatName: d.FlatName, + RelPath: d.RelPath, + SourcePath: d.SourcePath, + IsInRepo: d.IsInRepo, + Targets: d.Targets, + Disabled: d.Disabled, + } + + if meta, _ := install.ReadMeta(d.SourcePath); meta != nil { + item.InstalledAt = meta.InstalledAt.Format("2006-01-02T15:04:05Z") + item.Source = meta.Source + item.Type = meta.Type + item.RepoURL = meta.RepoURL + item.Version = meta.Version + item.Branch = meta.Branch + } + enrichSkillBranch(&item) + + items = append(items, item) } - enrichSkillBranch(&item) + } + + // Agents + if (kindFilter == "" || kindFilter == "agent") && agentsSource != "" { + agentEntries, _ := os.ReadDir(agentsSource) + for _, e := range agentEntries { + if e.IsDir() || !strings.HasSuffix(strings.ToLower(e.Name()), ".md") { + continue + } + agentName := strings.TrimSuffix(e.Name(), ".md") + agentPath := filepath.Join(agentsSource, e.Name()) + + item := skillItem{ + Name: agentName, + Kind: "agent", + FlatName: e.Name(), + RelPath: e.Name(), + SourcePath: agentPath, + } + + // Check for agent metadata + metaPath := filepath.Join(agentsSource, agentName+".skillshare-meta.json") + if meta, _ := install.ReadMeta(metaPath); meta != nil { + item.InstalledAt = meta.InstalledAt.Format("2006-01-02T15:04:05Z") + item.Source = meta.Source + item.Type = meta.Type + item.RepoURL = meta.RepoURL + item.Version = meta.Version + } - items = append(items, item) + items = append(items, item) + } } writeJSON(w, map[string]any{"skills": items}) diff --git a/internal/server/handler_targets.go b/internal/server/handler_targets.go index 1c2a9f11..2c1d9f43 100644 --- a/internal/server/handler_targets.go +++ b/internal/server/handler_targets.go @@ -26,6 +26,9 @@ type targetItem struct { ExpectedSkillCount int `json:"expectedSkillCount"` SkippedSkillCount int `json:"skippedSkillCount,omitempty"` CollisionCount int `json:"collisionCount,omitempty"` + AgentPath string `json:"agentPath,omitempty"` + AgentLinkedCount int `json:"agentLinkedCount,omitempty"` + AgentExpectedCount int `json:"agentExpectedCount,omitempty"` } func (s *Server) handleListTargets(w http.ResponseWriter, r *http.Request) { diff --git a/internal/server/server.go b/internal/server/server.go index 4bd2eece..f46dfed4 100644 --- a/internal/server/server.go +++ b/internal/server/server.go @@ -129,6 +129,15 @@ func (s *Server) IsProjectMode() bool { return s.projectRoot != "" } +// agentsSource returns the agents source directory for the current mode. +// Caller must hold s.mu (RLock or Lock) when accessing s.cfg. +func (s *Server) agentsSource() string { + if s.IsProjectMode() { + return filepath.Join(s.projectRoot, ".skillshare", "agents") + } + return s.cfg.EffectiveAgentsSource() +} + // cloneTargets returns a shallow copy of the Targets map. // Callers must hold s.mu (RLock or Lock). func (s *Server) cloneTargets() map[string]config.TargetConfig { From 5536cac794d77570a2eb8e5270eb3129aa156074 Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 30 Mar 2026 00:16:36 +0800 Subject: [PATCH 010/205] feat(ui): add agent kind types to API client and KindBadge component Update TypeScript types to match backend agent support: - Skill: add kind field ('skill' | 'agent') - Overview: add agentCount field - Target: add agentPath, agentLinkedCount, agentExpectedCount - AuditFinding: add optional kind field - TrashedSkill: add optional kind field Update listSkills() to accept optional kind filter param (?kind=agent). Add KindBadge component rendering [S] (info) or [A] (accent) badges using the existing Badge component. --- ui/src/api/client.ts | 10 +++++++++- ui/src/components/KindBadge.tsx | 13 +++++++++++++ 2 files changed, 22 insertions(+), 1 deletion(-) create mode 100644 ui/src/components/KindBadge.tsx diff --git a/ui/src/api/client.ts b/ui/src/api/client.ts index d1af84da..c2b7c05a 100644 --- a/ui/src/api/client.ts +++ b/ui/src/api/client.ts @@ -125,7 +125,8 @@ export const api = { getOverview: () => apiFetch('/overview'), // Skills - listSkills: () => apiFetch<{ skills: Skill[] }>('/skills'), + listSkills: (kind?: 'skill' | 'agent') => + apiFetch<{ skills: Skill[] }>(kind ? `/skills?kind=${kind}` : '/skills'), getSkill: (name: string) => apiFetch<{ skill: Skill; skillMdContent: string; files: string[] }>(`/skills/${encodeURIComponent(name)}`), deleteSkill: (name: string) => @@ -481,6 +482,7 @@ export interface TrackedRepo { export interface Overview { source: string; skillCount: number; + agentCount: number; topLevelCount: number; targetCount: number; mode: string; @@ -501,6 +503,7 @@ export interface VersionCheck { export interface Skill { name: string; + kind: 'skill' | 'agent'; flatName: string; relPath: string; sourcePath: string; @@ -561,6 +564,9 @@ export interface Target { expectedSkillCount: number; skippedSkillCount?: number; collisionCount?: number; + agentPath?: string; + agentLinkedCount?: number; + agentExpectedCount?: number; } export interface SyncResult { @@ -720,6 +726,7 @@ export interface CollectResult { // Trash types export interface TrashedSkill { name: string; + kind?: 'skill' | 'agent'; timestamp: string; date: string; size: number; @@ -853,6 +860,7 @@ export interface LogStatsResponse { // Audit types export interface AuditFinding { severity: 'CRITICAL' | 'HIGH' | 'MEDIUM' | 'LOW' | 'INFO'; + kind?: 'skill' | 'agent'; pattern: string; message: string; file: string; diff --git a/ui/src/components/KindBadge.tsx b/ui/src/components/KindBadge.tsx new file mode 100644 index 00000000..bbc36b1b --- /dev/null +++ b/ui/src/components/KindBadge.tsx @@ -0,0 +1,13 @@ +import Badge from './Badge'; + +interface KindBadgeProps { + kind: 'skill' | 'agent'; + size?: 'sm' | 'md'; +} + +export default function KindBadge({ kind, size = 'sm' }: KindBadgeProps) { + if (kind === 'agent') { + return A; + } + return S; +} From 47da532bbe4160512099dfe55c6a82d844ee34a4 Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 30 Mar 2026 00:19:52 +0800 Subject: [PATCH 011/205] feat(cli,api): add agent info to status, doctor, and trash handler status: show agents source path and agent count alongside skills source. doctor: add checkAgentsSource() health check verifying agents source directory exists, is a directory, and reports agent count. Non-existent dir is info (not error) since agents are optional. handler_trash: add kind field to trashItemJSON for frontend badge. --- cmd/skillshare/doctor.go | 34 ++++++++++++++++++++++++++++++++ cmd/skillshare/status.go | 14 +++++++++++++ internal/server/handler_trash.go | 28 ++++++++++++++++++++++++-- 3 files changed, 74 insertions(+), 2 deletions(-) diff --git a/cmd/skillshare/doctor.go b/cmd/skillshare/doctor.go index 7d06c3b8..9700e57a 100644 --- a/cmd/skillshare/doctor.go +++ b/cmd/skillshare/doctor.go @@ -220,6 +220,7 @@ func runDoctorChecks(cfg *config.Config, result *doctorResult, isProject bool) { sp.Stop() checkSource(cfg, result, discovered, discoverErr) + checkAgentsSource(cfg, result) checkSkillignore(result, stats) checkSymlinkSupport(result) @@ -303,6 +304,39 @@ func checkSource(cfg *config.Config, result *doctorResult, discovered []sync.Dis result.addCheck("source", checkPass, fmt.Sprintf("Source: %s (%d skills)", cfg.Source, skillCount), nil) } +func checkAgentsSource(cfg *config.Config, result *doctorResult) { + agentsSource := cfg.EffectiveAgentsSource() + info, err := os.Stat(agentsSource) + if err != nil { + if os.IsNotExist(err) { + ui.Info("Agents source: %s (not created yet)", agentsSource) + result.addCheck("agents_source", checkPass, fmt.Sprintf("Agents source: %s (not created yet)", agentsSource), nil) + return + } + ui.Error("Agents source error: %s", err) + result.addError() + result.addCheck("agents_source", checkError, fmt.Sprintf("Agents source error: %v", err), nil) + return + } + + if !info.IsDir() { + ui.Error("Agents source is not a directory: %s", agentsSource) + result.addError() + result.addCheck("agents_source", checkError, fmt.Sprintf("Agents source is not a directory: %s", agentsSource), nil) + return + } + + agentCount := 0 + entries, _ := os.ReadDir(agentsSource) + for _, e := range entries { + if !e.IsDir() && strings.HasSuffix(strings.ToLower(e.Name()), ".md") { + agentCount++ + } + } + ui.Success("Agents source: %s (%d agents)", agentsSource, agentCount) + result.addCheck("agents_source", checkPass, fmt.Sprintf("Agents source: %s (%d agents)", agentsSource, agentCount), nil) +} + func checkSymlinkSupport(result *doctorResult) { testLink := filepath.Join(os.TempDir(), "skillshare_symlink_test") testTarget := filepath.Join(os.TempDir(), "skillshare_symlink_target") diff --git a/cmd/skillshare/status.go b/cmd/skillshare/status.go index dd4f2c0b..1412d4a7 100644 --- a/cmd/skillshare/status.go +++ b/cmd/skillshare/status.go @@ -222,6 +222,20 @@ func printSourceStatus(cfg *config.Config, skillCount int, stats *skillignore.Ig ui.Success("%s (%d skills, %s)", cfg.Source, skillCount, info.ModTime().Format("2006-01-02 15:04")) printSkillignoreLine(stats) + + // Agents source + agentsSource := cfg.EffectiveAgentsSource() + if agentsInfo, agentsErr := os.Stat(agentsSource); agentsErr == nil { + agentCount := 0 + if entries, readErr := os.ReadDir(agentsSource); readErr == nil { + for _, e := range entries { + if !e.IsDir() && strings.HasSuffix(strings.ToLower(e.Name()), ".md") { + agentCount++ + } + } + } + ui.Success("%s (%d agents, %s)", agentsSource, agentCount, agentsInfo.ModTime().Format("2006-01-02 15:04")) + } } func printSkillignoreLine(stats *skillignore.IgnoreStats) { diff --git a/internal/server/handler_trash.go b/internal/server/handler_trash.go index f1fad398..5dc9e105 100644 --- a/internal/server/handler_trash.go +++ b/internal/server/handler_trash.go @@ -10,6 +10,7 @@ import ( type trashItemJSON struct { Name string `json:"name"` + Kind string `json:"kind,omitempty"` Timestamp string `json:"timestamp"` Date string `json:"date"` Size int64 `json:"size"` @@ -24,18 +25,40 @@ func (s *Server) trashBase() string { return trash.TrashDir() } +// agentTrashBase returns the agent trash directory for the current mode. +func (s *Server) agentTrashBase() string { + if s.IsProjectMode() { + return trash.ProjectAgentTrashDir(s.projectRoot) + } + return trash.AgentTrashDir() +} + // handleListTrash returns all trashed items with total size. func (s *Server) handleListTrash(w http.ResponseWriter, r *http.Request) { // Snapshot config under RLock, then release before I/O. s.mu.RLock() base := s.trashBase() + agentBase := s.agentTrashBase() s.mu.RUnlock() + items := trash.List(base) + agentItems := trash.List(agentBase) - out := make([]trashItemJSON, 0, len(items)) + out := make([]trashItemJSON, 0, len(items)+len(agentItems)) for _, item := range items { out = append(out, trashItemJSON{ Name: item.Name, + Kind: "skill", + Timestamp: item.Timestamp, + Date: item.Date.Format("2006-01-02T15:04:05Z07:00"), + Size: item.Size, + Path: item.Path, + }) + } + for _, item := range agentItems { + out = append(out, trashItemJSON{ + Name: item.Name, + Kind: "agent", Timestamp: item.Timestamp, Date: item.Date.Format("2006-01-02T15:04:05Z07:00"), Size: item.Size, @@ -43,9 +66,10 @@ func (s *Server) handleListTrash(w http.ResponseWriter, r *http.Request) { }) } + totalSize := trash.TotalSize(base) + trash.TotalSize(agentBase) writeJSON(w, map[string]any{ "items": out, - "totalSize": trash.TotalSize(base), + "totalSize": totalSize, }) } From 9a053a35b6d50e6d370a1d2f42878abb275505bf Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 30 Mar 2026 00:23:48 +0800 Subject: [PATCH 012/205] feat(ui): add agent stat card, kind filter tabs, and kind fields to API handlers DashboardPage: add Agents stat card with Bot icon and accent color, linking to /skills?kind=agent. SkillsPage: add Skills/Agents filter tabs alongside existing filters. KindBadge displayed on each skill card. matchFilter supports kind-based filtering with accurate filter counts. handler_collect: add kind field to localSkillItem response. handler_uninstall: add kind field to batch request and result structs. --- internal/server/handler_collect.go | 2 ++ internal/server/handler_uninstall.go | 2 ++ ui/src/pages/DashboardPage.tsx | 10 ++++++++++ ui/src/pages/SkillsPage.tsx | 17 +++++++++++++++-- 4 files changed, 29 insertions(+), 2 deletions(-) diff --git a/internal/server/handler_collect.go b/internal/server/handler_collect.go index a0efa57d..87220ec6 100644 --- a/internal/server/handler_collect.go +++ b/internal/server/handler_collect.go @@ -14,6 +14,7 @@ import ( type localSkillItem struct { Name string `json:"name"` + Kind string `json:"kind,omitempty"` Path string `json:"path"` TargetName string `json:"targetName"` Size int64 `json:"size"` @@ -67,6 +68,7 @@ func (s *Server) handleCollectScan(w http.ResponseWriter, r *http.Request) { for _, sk := range locals { items = append(items, localSkillItem{ Name: sk.Name, + Kind: "skill", Path: sk.Path, TargetName: name, Size: ssync.CalculateDirSize(sk.Path), diff --git a/internal/server/handler_uninstall.go b/internal/server/handler_uninstall.go index 974560a9..ef0c10ce 100644 --- a/internal/server/handler_uninstall.go +++ b/internal/server/handler_uninstall.go @@ -18,11 +18,13 @@ import ( type batchUninstallRequest struct { Names []string `json:"names"` + Kind string `json:"kind,omitempty"` Force bool `json:"force"` } type batchUninstallItemResult struct { Name string `json:"name"` + Kind string `json:"kind,omitempty"` Success bool `json:"success"` MovedToTrash bool `json:"movedToTrash,omitempty"` Error string `json:"error,omitempty"` diff --git a/ui/src/pages/DashboardPage.tsx b/ui/src/pages/DashboardPage.tsx index e9f58521..a4009b75 100644 --- a/ui/src/pages/DashboardPage.tsx +++ b/ui/src/pages/DashboardPage.tsx @@ -18,6 +18,7 @@ import { ShieldAlert, FolderPlus, LayoutDashboard, + Bot, } from 'lucide-react'; import { useQuery, useQueryClient } from '@tanstack/react-query'; import { queryKeys, staleTimes } from '../lib/queryKeys'; @@ -115,6 +116,15 @@ export default function DashboardPage() { bg: 'bg-info-light', to: '/skills', }, + { + label: 'Agents', + value: data.agentCount, + subtitle: 'installed', + icon: Bot, + color: 'text-accent', + bg: 'bg-accent/10', + to: '/skills?kind=agent', + }, { label: 'Targets', value: data.targetCount, diff --git a/ui/src/pages/SkillsPage.tsx b/ui/src/pages/SkillsPage.tsx index 66534c3c..6fcc39fc 100644 --- a/ui/src/pages/SkillsPage.tsx +++ b/ui/src/pages/SkillsPage.tsx @@ -24,12 +24,14 @@ import { ExternalLink, MousePointerClick, X, + Bot, } from 'lucide-react'; import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query'; import { VirtuosoGrid, Virtuoso } from 'react-virtuoso'; import type { GridComponents } from 'react-virtuoso'; import { queryKeys, staleTimes } from '../lib/queryKeys'; import Badge from '../components/Badge'; +import KindBadge from '../components/KindBadge'; import { Input, Select, type SelectOption } from '../components/Input'; import { PageSkeleton } from '../components/Skeleton'; import EmptyState from '../components/EmptyState'; @@ -464,12 +466,14 @@ function saveCollapsed(collapsed: Set) { /* -- Filter, Sort & View types -------------------- */ -type FilterType = 'all' | 'tracked' | 'github' | 'local'; +type FilterType = 'all' | 'skills' | 'agents' | 'tracked' | 'github' | 'local'; type SortType = 'name-asc' | 'name-desc' | 'newest' | 'oldest'; type ViewType = 'grid' | 'grouped' | 'table'; const filterOptions: { key: FilterType; label: string; icon: React.ReactNode }[] = [ { key: 'all', label: 'All', icon: }, + { key: 'skills', label: 'Skills', icon: }, + { key: 'agents', label: 'Agents', icon: }, { key: 'tracked', label: 'Tracked', icon: }, { key: 'github', label: 'GitHub', icon: }, { key: 'local', label: 'Local', icon: }, @@ -479,6 +483,10 @@ function matchFilter(skill: Skill, filterType: FilterType): boolean { switch (filterType) { case 'all': return true; + case 'skills': + return skill.kind !== 'agent'; + case 'agents': + return skill.kind === 'agent'; case 'tracked': return skill.isInRepo; case 'github': @@ -599,7 +607,8 @@ const SkillPostit = memo(function SkillPostit({ : } -

+

+ {skill.kind && } {skill.name}

@@ -736,11 +745,15 @@ export default function SkillsPage() { const filterCounts = useMemo(() => { const counts: Record = { all: skills.length, + skills: 0, + agents: 0, tracked: 0, github: 0, local: 0, }; for (const s of skills) { + if (s.kind === 'agent') counts.agents++; + else counts.skills++; if (s.isInRepo) counts.tracked++; if ((s.type === 'github' || s.type === 'github-subdir') && !s.isInRepo) counts.github++; if (!s.type && !s.isInRepo) counts.local++; From 017dc22f3eff8ee2f5decf365ebca634191fdbe8 Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 30 Mar 2026 00:26:33 +0800 Subject: [PATCH 013/205] feat: add --kind agent to enable/disable and agent path to TargetsPage enable.go: support --kind agent flag to route enable/disable operations to .agentignore instead of .skillignore. Works in both global and project modes. Oplog entries now include kind field. TargetsPage.tsx: show agent path below skill path when target has agentPath. Display agent linked count alongside skill counts. --- cmd/skillshare/enable.go | 34 ++++++++++++++++++++++++++++------ ui/src/pages/TargetsPage.tsx | 8 ++++++++ 2 files changed, 36 insertions(+), 6 deletions(-) diff --git a/cmd/skillshare/enable.go b/cmd/skillshare/enable.go index 8ccef6a2..d747b444 100644 --- a/cmd/skillshare/enable.go +++ b/cmd/skillshare/enable.go @@ -33,6 +33,12 @@ func cmdToggleSkill(args []string, enable bool) error { return err } + // Extract --kind flag before parsing other args + kind, rest, err := parseKindFlag(rest) + if err != nil { + return err + } + var dryRun bool var patterns []string for _, arg := range rest { @@ -68,20 +74,35 @@ func cmdToggleSkill(args []string, enable bool) error { } applyModeLabel(mode) + isAgent := kind == kindAgents + var ignorePath string var cfgPath string if mode == modeProject { - ignorePath = filepath.Join(cwd, ".skillshare", "skills", ".skillignore") + if isAgent { + ignorePath = filepath.Join(cwd, ".skillshare", "agents", ".agentignore") + } else { + ignorePath = filepath.Join(cwd, ".skillshare", "skills", ".skillignore") + } cfgPath = config.ProjectConfigPath(cwd) } else { cfg, err := config.Load() if err != nil { return fmt.Errorf("failed to load config: %w", err) } - ignorePath = filepath.Join(cfg.Source, ".skillignore") + if isAgent { + ignorePath = filepath.Join(cfg.EffectiveAgentsSource(), ".agentignore") + } else { + ignorePath = filepath.Join(cfg.Source, ".skillignore") + } cfgPath = config.ConfigPath() } + ignoreLabel := ".skillignore" + if isAgent { + ignoreLabel = ".agentignore" + } + changed := false for _, pattern := range patterns { if dryRun { @@ -96,25 +117,25 @@ func cmdToggleSkill(args []string, enable bool) error { if enable { removed, err := skillignore.RemovePattern(ignorePath, pattern) if err != nil { - return fmt.Errorf("failed to update .skillignore: %w", err) + return fmt.Errorf("failed to update %s: %w", ignoreLabel, err) } if !removed { ui.Warning("%s is not disabled", pattern) continue } changed = true - ui.Success("Enabled: %s (removed from .skillignore)", pattern) + ui.Success("Enabled: %s (removed from %s)", pattern, ignoreLabel) } else { added, err := skillignore.AddPattern(ignorePath, pattern) if err != nil { - return fmt.Errorf("failed to update .skillignore: %w", err) + return fmt.Errorf("failed to update %s: %w", ignoreLabel, err) } if !added { ui.Warning("%s is already disabled", pattern) continue } changed = true - ui.Success("Disabled: %s (added to .skillignore)", pattern) + ui.Success("Disabled: %s (added to %s)", pattern, ignoreLabel) } } @@ -124,6 +145,7 @@ func cmdToggleSkill(args []string, enable bool) error { e := oplog.NewEntry(action, "ok", time.Since(start)) e.Args = map[string]any{ "patterns": patterns, + "kind": kind.String(), } oplog.Write(cfgPath, oplog.OpsFile, e) } diff --git a/ui/src/pages/TargetsPage.tsx b/ui/src/pages/TargetsPage.tsx index a6ce8972..b34ed70b 100644 --- a/ui/src/pages/TargetsPage.tsx +++ b/ui/src/pages/TargetsPage.tsx @@ -382,6 +382,11 @@ export default function TargetsPage() {

{shortenHome(target.path)}

+ {target.agentPath && ( +

+ agent: {shortenHome(target.agentPath)} +

+ )}
{(target.mode === 'merge' || target.mode === 'copy') && target.localCount > 0 && ( @@ -431,6 +436,9 @@ export default function TargetsPage() { ) : ( <>{target.linkedCount} {target.mode === 'copy' ? 'managed' : 'shared'}, {target.localCount} local )} + {target.agentLinkedCount != null && target.agentLinkedCount > 0 && ( + <>, {target.agentLinkedCount} agent{target.agentLinkedCount !== 1 ? 's' : ''} + )} )}
From 4c43506df39f6a28605ed502bf1d3c0f0ad7467f Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 30 Mar 2026 00:29:16 +0800 Subject: [PATCH 014/205] feat: agent detail endpoint, project reconcile, and TrashPage badge handler_skills: skill detail endpoint falls back to agents source when skill not found. Returns agent .md file content and kind='agent'. project_reconcile: add ReconcileProjectAgents() to scan agents source, update registry with kind='agent' entries, and batch-update .skillshare/.gitignore for agent files and metadata. TrashPage: add KindBadge next to trashed item names with 'skill' fallback for old trash items without kind field. --- internal/config/project_reconcile.go | 68 ++++++++++++++++++++++++++++ internal/server/handler_skills.go | 33 ++++++++++++++ ui/src/pages/TrashPage.tsx | 2 + 3 files changed, 103 insertions(+) diff --git a/internal/config/project_reconcile.go b/internal/config/project_reconcile.go index bee61b25..aa8f94b8 100644 --- a/internal/config/project_reconcile.go +++ b/internal/config/project_reconcile.go @@ -162,6 +162,74 @@ func ReconcileProjectSkills(projectRoot string, projectCfg *ProjectConfig, reg * return nil } +// ReconcileProjectAgents scans the project agents source directory for +// installed agents and ensures they are listed in the registry with kind="agent". +// Also updates .skillshare/.gitignore for each agent. +func ReconcileProjectAgents(projectRoot string, reg *Registry, agentsSourcePath string) error { + if _, err := os.Stat(agentsSourcePath); os.IsNotExist(err) { + return nil + } + + entries, err := os.ReadDir(agentsSourcePath) + if err != nil { + return nil + } + + changed := false + index := map[string]bool{} + for _, s := range reg.Skills { + if s.EffectiveKind() == "agent" { + index[s.Name] = true + } + } + + var gitignoreEntries []string + + for _, entry := range entries { + name := entry.Name() + if entry.IsDir() || !strings.HasSuffix(strings.ToLower(name), ".md") { + continue + } + + agentName := strings.TrimSuffix(name, ".md") + + // Check for metadata + metaPath := filepath.Join(agentsSourcePath, agentName+".skillshare-meta.json") + meta, _ := install.ReadMeta(metaPath) + if meta == nil || meta.Source == "" { + continue // local agent, not installed + } + + if !index[agentName] { + reg.Skills = append(reg.Skills, SkillEntry{ + Name: agentName, + Kind: "agent", + Source: meta.Source, + }) + index[agentName] = true + changed = true + } + + gitignoreEntries = append(gitignoreEntries, filepath.Join("agents", name)) + // Also ignore the metadata file + gitignoreEntries = append(gitignoreEntries, filepath.Join("agents", agentName+".skillshare-meta.json")) + } + + if len(gitignoreEntries) > 0 { + if err := install.UpdateGitIgnoreBatch(filepath.Join(projectRoot, ".skillshare"), gitignoreEntries); err != nil { + return fmt.Errorf("failed to update .skillshare/.gitignore for agents: %w", err) + } + } + + if changed { + if err := reg.Save(filepath.Join(projectRoot, ".skillshare")); err != nil { + return err + } + } + + return nil +} + // isGitRepo checks if the given path is a git repository (has .git/ directory or file). func isGitRepo(path string) bool { _, err := os.Stat(filepath.Join(path, ".git")) diff --git a/internal/server/handler_skills.go b/internal/server/handler_skills.go index 3e96ff20..82a07583 100644 --- a/internal/server/handler_skills.go +++ b/internal/server/handler_skills.go @@ -128,6 +128,7 @@ func (s *Server) handleGetSkill(w http.ResponseWriter, r *http.Request) { // Snapshot config under RLock, then release before I/O. s.mu.RLock() source := s.cfg.Source + agentsSource := s.agentsSource() s.mu.RUnlock() name := r.PathValue("name") @@ -147,6 +148,7 @@ func (s *Server) handleGetSkill(w http.ResponseWriter, r *http.Request) { item := skillItem{ Name: baseName, + Kind: "skill", FlatName: d.FlatName, RelPath: d.RelPath, SourcePath: d.SourcePath, @@ -198,6 +200,37 @@ func (s *Server) handleGetSkill(w http.ResponseWriter, r *http.Request) { return } + // Fallback: check agents source + if agentsSource != "" { + agentFile := name + ".md" + agentPath := filepath.Join(agentsSource, agentFile) + if data, err := os.ReadFile(agentPath); err == nil { + item := skillItem{ + Name: name, + Kind: "agent", + FlatName: agentFile, + RelPath: agentFile, + SourcePath: agentPath, + } + + metaPath := filepath.Join(agentsSource, name+".skillshare-meta.json") + if meta, _ := install.ReadMeta(metaPath); meta != nil { + item.InstalledAt = meta.InstalledAt.Format("2006-01-02T15:04:05Z") + item.Source = meta.Source + item.Type = meta.Type + item.RepoURL = meta.RepoURL + item.Version = meta.Version + } + + writeJSON(w, map[string]any{ + "skill": item, + "skillMdContent": string(data), + "files": []string{agentFile}, + }) + return + } + } + writeError(w, http.StatusNotFound, "skill not found: "+name) } diff --git a/ui/src/pages/TrashPage.tsx b/ui/src/pages/TrashPage.tsx index e3a2cb8c..416d3aec 100644 --- a/ui/src/pages/TrashPage.tsx +++ b/ui/src/pages/TrashPage.tsx @@ -20,6 +20,7 @@ import ConfirmDialog from '../components/ConfirmDialog'; import EmptyState from '../components/EmptyState'; import { PageSkeleton } from '../components/Skeleton'; import { useToast } from '../components/Toast'; +import KindBadge from '../components/KindBadge'; function timeAgo(dateStr: string): string { const now = Date.now(); @@ -240,6 +241,7 @@ function TrashCard({
{item.name} + {timeAgo(item.date)} From 6659898cc27d3a80ee715b13ef79b0f2e5ec9d84 Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 30 Mar 2026 00:33:27 +0800 Subject: [PATCH 015/205] feat: add kind badge to SkillDetailPage and kind arg to audit command SkillDetailPage: import KindBadge, show kind badge (S/A) next to skill name in the detail header. audit: add parseKindArg() for 'skillshare audit agents' syntax. Kind filter parsing ready, actual agent-only filtering to be wired. --- cmd/skillshare/audit.go | 11 +++++++++++ ui/src/pages/SkillDetailPage.tsx | 2 ++ 2 files changed, 13 insertions(+) diff --git a/cmd/skillshare/audit.go b/cmd/skillshare/audit.go index 43fbc864..f575a555 100644 --- a/cmd/skillshare/audit.go +++ b/cmd/skillshare/audit.go @@ -130,6 +130,10 @@ func cmdAudit(args []string) error { } applyModeLabel(mode) + // Extract kind filter (e.g. "skillshare audit agents") before subcommand check. + kind, rest := parseKindArg(rest) + _ = kind // TODO: wire agent-only audit filtering + // Check for "rules" subcommand before standard audit arg parsing. if len(rest) > 0 && rest[0] == "rules" { return cmdAuditRules(mode, rest[1:]) @@ -161,6 +165,7 @@ func cmdAudit(args []string) error { var ( sourcePath string + agentsSourcePath string projectRoot string defaultThreshold string configProfile string @@ -196,6 +201,7 @@ func cmdAudit(args []string) error { return err } sourcePath = cfg.Source + agentsSourcePath = cfg.EffectiveAgentsSource() defaultThreshold = cfg.Audit.BlockThreshold configProfile = cfg.Audit.Profile configDedupe = cfg.Audit.DedupeMode @@ -203,6 +209,11 @@ func cmdAudit(args []string) error { cfgPath = config.ConfigPath() } + // When kind is agents-only, override sourcePath to the agents source directory. + if kind == kindAgents && agentsSourcePath != "" { + sourcePath = agentsSourcePath + } + policy := audit.ResolvePolicy(audit.PolicyInputs{ Profile: opts.Profile, Threshold: opts.Threshold, diff --git a/ui/src/pages/SkillDetailPage.tsx b/ui/src/pages/SkillDetailPage.tsx index 6d37444a..c0079ed0 100644 --- a/ui/src/pages/SkillDetailPage.tsx +++ b/ui/src/pages/SkillDetailPage.tsx @@ -10,6 +10,7 @@ import remarkGfm from 'remark-gfm'; import { useQuery, useQueryClient } from '@tanstack/react-query'; import { queryKeys, staleTimes } from '../lib/queryKeys'; import Badge from '../components/Badge'; +import KindBadge from '../components/KindBadge'; import Card from '../components/Card'; import CopyButton from '../components/CopyButton'; import Button from '../components/Button'; @@ -370,6 +371,7 @@ export default function SkillDetailPage() { > {skill.name} + {skill.disabled && disabled} {skill.isInRepo && tracked repo} {skillTypeLabel(skill.type) && {skillTypeLabel(skill.type)}} From 57cfab71338869cc888c37d10dfdff9c80fc1764 Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 30 Mar 2026 00:35:44 +0800 Subject: [PATCH 016/205] feat(cli): add kind filter to check and uninstall commands check: add parseKindArg() for 'skillshare check agents'. When agents kind selected, runs CheckAgents() on agents source dir and displays per-agent status (up_to_date/drifted/local/error). Supports --json. uninstall: add --kind flag to parseUninstallArgs and parseKindFlag() in cmdUninstall dispatch. Kind stored in uninstallOptions for agent-specific uninstall routing. --- cmd/skillshare/check.go | 36 ++++++++++++++++++++++++++++++++++ cmd/skillshare/uninstall.go | 39 ++++++++++++++++++++++++++++++++++--- 2 files changed, 72 insertions(+), 3 deletions(-) diff --git a/cmd/skillshare/check.go b/cmd/skillshare/check.go index 94c7f543..bc7f1b24 100644 --- a/cmd/skillshare/check.go +++ b/cmd/skillshare/check.go @@ -171,6 +171,9 @@ func cmdCheck(args []string) error { applyModeLabel(mode) + // Extract kind filter (e.g. "skillshare check agents") before arg parsing. + kind, rest := parseKindArg(rest) + scope := "global" if mode == modeProject { scope = "project" @@ -198,6 +201,39 @@ func cmdCheck(args []string) error { return err } + // Agent-only check: scan agents source directory and skip repo checks. + if kind == kindAgents { + agentsDir := cfg.EffectiveAgentsSource() + agentResults := check.CheckAgents(agentsDir) + if opts.json { + out, _ := json.MarshalIndent(agentResults, "", " ") + fmt.Println(string(out)) + } else { + ui.Header(ui.WithModeLabel("Checking agents")) + ui.StepStart("Agents source", agentsDir) + if len(agentResults) == 0 { + ui.Info("No agents found") + } else { + fmt.Println() + for _, r := range agentResults { + switch r.Status { + case "up_to_date": + ui.ListItem("success", r.Name, "up to date") + case "drifted": + ui.ListItem("warning", r.Name, r.Message) + case "local": + ui.ListItem("info", r.Name, "local agent") + case "error": + ui.ListItem("error", r.Name, r.Message) + } + } + } + fmt.Println() + } + logCheckOp(cfgPath, 0, len(agentResults), 0, 0, scope, start, nil) + return nil + } + // No names and no groups → check all (existing behavior) if len(opts.names) == 0 && len(opts.groups) == 0 { cmdErr := runCheck(cfg.Source, opts.json, targetNamesFromConfig(cfg.Targets)) diff --git a/cmd/skillshare/uninstall.go b/cmd/skillshare/uninstall.go index 97eee8db..2127c433 100644 --- a/cmd/skillshare/uninstall.go +++ b/cmd/skillshare/uninstall.go @@ -23,9 +23,10 @@ import ( // uninstallOptions holds parsed arguments for uninstall command type uninstallOptions struct { - skillNames []string // positional args (0+) - groups []string // --group/-G values (repeatable) - all bool // --all: remove ALL skills from source + skillNames []string // positional args (0+) + groups []string // --group/-G values (repeatable) + kind resourceKindFilter // --kind skill|agent + all bool // --all: remove ALL skills from source force bool dryRun bool jsonOutput bool @@ -69,6 +70,19 @@ func parseUninstallArgs(args []string) (*uninstallOptions, bool, error) { opts.dryRun = true case arg == "--json": opts.jsonOutput = true + case arg == "--kind": + i++ + if i >= len(args) { + return nil, false, fmt.Errorf("--kind requires a value (skill or agent)") + } + switch strings.ToLower(args[i]) { + case "skill", "skills": + opts.kind = kindSkills + case "agent", "agents": + opts.kind = kindAgents + default: + return nil, false, fmt.Errorf("--kind must be 'skill' or 'agent', got %q", args[i]) + } case arg == "--group" || arg == "-G": i++ if i >= len(args) { @@ -549,6 +563,13 @@ func cmdUninstall(args []string) error { applyModeLabel(mode) + // Extract --kind flag before parsing other args. + kind, rest, err := parseKindFlag(rest) + if err != nil { + return err + } + _ = kind // TODO: wire agent-specific uninstall path + if mode == modeProject { err := cmdUninstallProject(rest, cwd) logUninstallOp(config.ProjectConfigPath(cwd), uninstallOpNames(rest), 0, start, err) @@ -577,6 +598,18 @@ func cmdUninstall(args []string) error { return fmt.Errorf("failed to load config: %w", err) } + // When --kind agent is set, resolve targets from the agents source directory. + if kind == kindAgents { + agentsDir := cfg.EffectiveAgentsSource() + for i, name := range opts.skillNames { + agentPath := filepath.Join(agentsDir, name+".md") + if _, statErr := os.Stat(agentPath); statErr == nil { + opts.skillNames[i] = agentPath + } + } + // TODO: implement full agent uninstall (remove .md + .skillshare-meta.json, update registry) + } + // --- Phase 1: RESOLVE --- var targets []*uninstallTarget seen := map[string]bool{} // dedup by path From 5dc981a5cb305e3559d614cc9cbfdd5cbef57e53 Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 30 Mar 2026 00:36:39 +0800 Subject: [PATCH 017/205] feat(audit): wire agent-only audit filtering via kind arg When 'skillshare audit agents' is used, override sourcePath to agents source directory so auditInstalled() scans agent .md files. --- cmd/skillshare/audit.go | 1 - 1 file changed, 1 deletion(-) diff --git a/cmd/skillshare/audit.go b/cmd/skillshare/audit.go index f575a555..4cf20e4e 100644 --- a/cmd/skillshare/audit.go +++ b/cmd/skillshare/audit.go @@ -132,7 +132,6 @@ func cmdAudit(args []string) error { // Extract kind filter (e.g. "skillshare audit agents") before subcommand check. kind, rest := parseKindArg(rest) - _ = kind // TODO: wire agent-only audit filtering // Check for "rules" subcommand before standard audit arg parsing. if len(rest) > 0 && rest[0] == "rules" { From e29a42d2d10164f9feea9ec8b91a9d878666366e Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 30 Mar 2026 00:39:10 +0800 Subject: [PATCH 018/205] feat(cli): add kind arg to update command and kind field to diff output update: add parseKindArg() for 'skillshare update agents' syntax. diff: add Kind field to diffJSONItem struct for agent-aware diff output. --- cmd/skillshare/diff.go | 1 + cmd/skillshare/update.go | 4 ++++ 2 files changed, 5 insertions(+) diff --git a/cmd/skillshare/diff.go b/cmd/skillshare/diff.go index 41d1351c..9ccc172d 100644 --- a/cmd/skillshare/diff.go +++ b/cmd/skillshare/diff.go @@ -45,6 +45,7 @@ type diffJSONTarget struct { type diffJSONItem struct { Action string `json:"action"` Name string `json:"name"` + Kind string `json:"kind,omitempty"` // "skill" or "agent" Reason string `json:"reason"` IsSync bool `json:"is_sync"` } diff --git a/cmd/skillshare/update.go b/cmd/skillshare/update.go index 73b2841d..84ba0b2f 100644 --- a/cmd/skillshare/update.go +++ b/cmd/skillshare/update.go @@ -130,6 +130,10 @@ func cmdUpdate(args []string) error { applyModeLabel(mode) + // Extract kind filter (e.g. "skillshare update agents") + kind, rest := parseKindArg(rest) + _ = kind // TODO: wire agent-only update path + if mode == modeProject { // Parse opts for logging (cmdUpdateProject parses again internally) projOpts, _, _ := parseUpdateArgs(rest) From c176d8940533ac793082123beffb5fb78b0f016e Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 30 Mar 2026 00:39:37 +0800 Subject: [PATCH 019/205] feat(api): add kind field to sync, audit, check, update, and install handlers handler_sync: add Kind field to diff stream item struct. handler_audit: add Kind field to audit stream result. handler_check: add Kind field to check stream result. handler_update: add Kind field to update stream result. handler_install: add Kind field to install request for agent support. --- internal/server/handler_audit.go | 1 + internal/server/handler_check.go | 1 + internal/server/handler_install.go | 2 ++ internal/server/handler_sync.go | 1 + internal/server/handler_update.go | 1 + 5 files changed, 6 insertions(+) diff --git a/internal/server/handler_audit.go b/internal/server/handler_audit.go index 8056e308..1fb97808 100644 --- a/internal/server/handler_audit.go +++ b/internal/server/handler_audit.go @@ -40,6 +40,7 @@ type auditResultResponse struct { AuditableBytes int64 `json:"auditableBytes"` Analyzability float64 `json:"analyzability"` TierProfile audit.TierProfile `json:"tierProfile"` + Kind string `json:"kind,omitempty"` } type auditSummary struct { diff --git a/internal/server/handler_check.go b/internal/server/handler_check.go index d9e5adfd..11ac5d18 100644 --- a/internal/server/handler_check.go +++ b/internal/server/handler_check.go @@ -23,6 +23,7 @@ type skillCheckResult struct { Version string `json:"version"` Status string `json:"status"` InstalledAt string `json:"installed_at,omitempty"` + Kind string `json:"kind,omitempty"` } func (s *Server) handleCheck(w http.ResponseWriter, r *http.Request) { diff --git a/internal/server/handler_install.go b/internal/server/handler_install.go index eb3c0301..a919193e 100644 --- a/internal/server/handler_install.go +++ b/internal/server/handler_install.go @@ -91,6 +91,7 @@ func (s *Server) handleInstallBatch(w http.ResponseWriter, r *http.Request) { SkipAudit bool `json:"skipAudit"` Into string `json:"into"` Name string `json:"name"` + Kind string `json:"kind,omitempty"` } if err := json.NewDecoder(r.Body).Decode(&body); err != nil { writeError(w, http.StatusBadRequest, "invalid JSON body") @@ -251,6 +252,7 @@ func (s *Server) handleInstall(w http.ResponseWriter, r *http.Request) { SkipAudit bool `json:"skipAudit"` Track bool `json:"track"` Into string `json:"into"` + Kind string `json:"kind,omitempty"` } if err := json.NewDecoder(r.Body).Decode(&body); err != nil { writeError(w, http.StatusBadRequest, "invalid JSON body") diff --git a/internal/server/handler_sync.go b/internal/server/handler_sync.go index b93f99d5..9e6729db 100644 --- a/internal/server/handler_sync.go +++ b/internal/server/handler_sync.go @@ -181,6 +181,7 @@ type diffItem struct { Skill string `json:"skill"` Action string `json:"action"` // "link", "update", "skip", "prune", "local" Reason string `json:"reason"` // human-readable description + Kind string `json:"kind,omitempty"` } type diffTarget struct { diff --git a/internal/server/handler_update.go b/internal/server/handler_update.go index eba69bce..2743b39f 100644 --- a/internal/server/handler_update.go +++ b/internal/server/handler_update.go @@ -28,6 +28,7 @@ type updateResultItem struct { IsRepo bool `json:"isRepo"` AuditRiskScore int `json:"auditRiskScore,omitempty"` AuditRiskLabel string `json:"auditRiskLabel,omitempty"` + Kind string `json:"kind,omitempty"` } func (s *Server) updateAuditThreshold() string { From 2f4021fc6aa4b7a0f259b7c3450ff9632504f1f3 Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 30 Mar 2026 00:44:08 +0800 Subject: [PATCH 020/205] feat: add kind badge to list TUI and remaining frontend pages list_tui_item: show [A] badge prefix for agents in terminal list view. list.go: add Kind field to skillEntry and skillJSON structs. SyncPage: add kind badge to diff items using DiffTarget.items.kind. AuditPage: add kind badge to audit result cards. UpdatePage: add kind badge to update result items. SkillPickerModal: add kind badge in skill picker. client.ts: add kind to DiffTarget items and AuditResult types. --- cmd/skillshare/list.go | 2 ++ cmd/skillshare/list_tui_item.go | 6 +++++- ui/src/api/client.ts | 7 ++++++- ui/src/components/SkillPickerModal.tsx | 4 +++- ui/src/pages/AuditPage.tsx | 2 ++ ui/src/pages/SyncPage.tsx | 4 +++- ui/src/pages/UpdatePage.tsx | 9 +++++++-- 7 files changed, 28 insertions(+), 6 deletions(-) diff --git a/cmd/skillshare/list.go b/cmd/skillshare/list.go index f3be3bc3..828b37a8 100644 --- a/cmd/skillshare/list.go +++ b/cmd/skillshare/list.go @@ -650,6 +650,7 @@ func cmdList(args []string) error { type skillEntry struct { Name string + Kind string // "skill" or "agent" Source string Type string InstalledAt string @@ -663,6 +664,7 @@ type skillEntry struct { // skillJSON is the JSON representation for --json output. type skillJSON struct { Name string `json:"name"` + Kind string `json:"kind,omitempty"` // "skill" or "agent" RelPath string `json:"relPath"` Source string `json:"source,omitempty"` Type string `json:"type,omitempty"` diff --git a/cmd/skillshare/list_tui_item.go b/cmd/skillshare/list_tui_item.go index b066a7ba..e4be8993 100644 --- a/cmd/skillshare/list_tui_item.go +++ b/cmd/skillshare/list_tui_item.go @@ -206,7 +206,11 @@ func skillTitleLine(e skillEntry) string { // Disabled: dim the entire name + ⊘ prefix return tc.Dim.Render("⊘ " + compactSkillPath(e)) } - title := colorSkillPath(compactSkillPath(e)) + var prefix string + if e.Kind == "agent" { + prefix = tc.Cyan.Render("[A]") + " " + } + title := prefix + colorSkillPath(compactSkillPath(e)) if badge := skillTypeBadge(e); badge != "" { return title + " " + badge } diff --git a/ui/src/api/client.ts b/ui/src/api/client.ts index c2b7c05a..0ee208ed 100644 --- a/ui/src/api/client.ts +++ b/ui/src/api/client.ts @@ -597,7 +597,7 @@ export interface ConfigSaveResponse { export interface DiffTarget { target: string; - items: { skill: string; action: string; reason?: string }[]; + items: { skill: string; action: string; reason?: string; kind?: 'skill' | 'agent' }[]; skippedCount?: number; collisionCount?: number; } @@ -631,6 +631,7 @@ export interface InstallResult { export interface UpdateResultItem { name: string; + kind?: 'skill' | 'agent'; action: string; // "updated", "up-to-date", "skipped", "error", "blocked" message?: string; isRepo: boolean; @@ -663,6 +664,7 @@ export interface DiscoveredSkill { name: string; path: string; description?: string; + kind?: 'skill' | 'agent'; } export interface DiscoverResult { @@ -705,6 +707,7 @@ export interface LocalSkillInfo { targetName: string; size: number; modTime: string; + kind?: 'skill' | 'agent'; } export interface CollectScanTarget { @@ -770,6 +773,7 @@ export interface RepoCheckResult { export interface SkillCheckResult { name: string; + kind?: 'skill' | 'agent'; source: string; version: string; status: string; @@ -875,6 +879,7 @@ export interface AuditFinding { export interface AuditResult { skillName: string; + kind?: 'skill' | 'agent'; findings: AuditFinding[]; riskScore: number; riskLabel: 'clean' | 'low' | 'medium' | 'high' | 'critical'; diff --git a/ui/src/components/SkillPickerModal.tsx b/ui/src/components/SkillPickerModal.tsx index 30ed8654..6967a878 100644 --- a/ui/src/components/SkillPickerModal.tsx +++ b/ui/src/components/SkillPickerModal.tsx @@ -5,6 +5,7 @@ import DialogShell from './DialogShell'; import { Input, Checkbox } from './Input'; import { radius } from '../design'; import type { DiscoveredSkill } from '../api/client'; +import KindBadge from './KindBadge'; interface SkillPickerModalProps { open: boolean; @@ -162,7 +163,8 @@ export default function SkillPickerModal({ /> )}
toggle(skill.path) : undefined}> - + + {skill.kind && } {skill.name} {skill.path !== '.' && skill.path !== skill.name && ( diff --git a/ui/src/pages/AuditPage.tsx b/ui/src/pages/AuditPage.tsx index f4b47a8c..357d9d62 100644 --- a/ui/src/pages/AuditPage.tsx +++ b/ui/src/pages/AuditPage.tsx @@ -26,6 +26,7 @@ import { radius, palette } from '../design'; import { severityBadgeVariant } from '../lib/severity'; import { BlockStamp, RiskMeter, riskColor, riskBgColor } from '../components/audit'; import ScrollToTop from '../components/ScrollToTop'; +import KindBadge from '../components/KindBadge'; type SeverityFilter = 'CRITICAL' | 'HIGH' | 'MEDIUM' | 'LOW' | 'INFO'; @@ -487,6 +488,7 @@ function SkillAuditCard({ result }: { result: AuditResult; index?: number }) { )}
+ {result.kind && } diff --git a/ui/src/pages/SyncPage.tsx b/ui/src/pages/SyncPage.tsx index 8a428398..e422c7ea 100644 --- a/ui/src/pages/SyncPage.tsx +++ b/ui/src/pages/SyncPage.tsx @@ -27,6 +27,7 @@ import { formatSyncToast, invalidateAfterSync } from '../lib/sync'; import StreamProgressBar from '../components/StreamProgressBar'; import SyncResultList from '../components/SyncResultList'; import { radius, shadows } from '../design'; +import KindBadge from '../components/KindBadge'; function extractIgnoreSources(data: IgnoreSources): IgnoreSources { return { @@ -535,11 +536,12 @@ function DiffTargetCard({ diff }: { diff: DiffTarget }) { ); } -function DiffItemRow({ item }: { item: { action: string; skill: string; reason?: string } }) { +function DiffItemRow({ item }: { item: { action: string; skill: string; reason?: string; kind?: 'skill' | 'agent' } }) { return (
+ {item.kind && } {item.skill} diff --git a/ui/src/pages/UpdatePage.tsx b/ui/src/pages/UpdatePage.tsx index 361990b1..bf8b37ca 100644 --- a/ui/src/pages/UpdatePage.tsx +++ b/ui/src/pages/UpdatePage.tsx @@ -17,11 +17,13 @@ import { api } from '../api/client'; import type { CheckResult } from '../api/client'; import StreamProgressBar from '../components/StreamProgressBar'; import { radius } from '../design'; +import KindBadge from '../components/KindBadge'; type UpdatePhase = 'idle' | 'updating' | 'done'; interface ItemUpdateStatus { name: string; + kind?: 'skill' | 'agent'; isRepo: boolean; status: 'pending' | 'in-progress' | 'success' | 'error' | 'blocked' | 'skipped'; message?: string; @@ -237,6 +239,7 @@ export default function UpdatePage() { if (idx === i) { return { ...s, + kind: item.kind, status: actionToStatus(item.action), message: item.message, auditRiskLabel: item.auditRiskLabel, @@ -418,7 +421,8 @@ export default function UpdatePage() { >
- + + {item.kind && } {item.name} {item.message && ( @@ -565,7 +569,8 @@ export default function UpdatePage() { toggleSkill(skill.name)} />
- + + {skill.kind && } {skill.name} From abd2328b9a0a20bff3909b004ee3d935d2907b03 Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 30 Mar 2026 00:46:46 +0800 Subject: [PATCH 021/205] feat(ui): add KindBadge to CollectPage and BatchUninstallPage CollectPage: show kind badge next to local skill names in scan results. BatchUninstallPage: show kind badge next to each skill in uninstall list. --- ui/src/pages/BatchUninstallPage.tsx | 6 +++++- ui/src/pages/CollectPage.tsx | 2 ++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/ui/src/pages/BatchUninstallPage.tsx b/ui/src/pages/BatchUninstallPage.tsx index b9fa6bdb..deacaa5c 100644 --- a/ui/src/pages/BatchUninstallPage.tsx +++ b/ui/src/pages/BatchUninstallPage.tsx @@ -32,6 +32,7 @@ import { useToast } from '../components/Toast'; import { PageSkeleton } from '../components/Skeleton'; import { Virtuoso } from 'react-virtuoso'; import { radius } from '../design'; +import KindBadge from '../components/KindBadge'; /* ── Glob → Regex (supports * and ? only) ──────────── */ @@ -427,7 +428,10 @@ export default function BatchUninstallPage() { disabled={phase !== 'selecting'} />
- {skill.name} + + + {skill.name} + {skill.relPath !== skill.name && ( {skill.relPath} )} diff --git a/ui/src/pages/CollectPage.tsx b/ui/src/pages/CollectPage.tsx index 2386ea8f..13019ad6 100644 --- a/ui/src/pages/CollectPage.tsx +++ b/ui/src/pages/CollectPage.tsx @@ -27,6 +27,7 @@ import { api, type CollectScanTarget, type CollectResult } from '../api/client'; import { queryKeys } from '../lib/queryKeys'; import { radius, shadows } from '../design'; import { formatSize } from '../lib/format'; +import KindBadge from '../components/KindBadge'; type Phase = 'idle' | 'scanning' | 'scanned' | 'collecting' | 'done'; @@ -415,6 +416,7 @@ function ScanTargetCard({ onToggle(key)} size="sm" disabled={disabled} /> + {sk.kind && } {sk.name} From 2cb830f736128d678af6b45558fcc75a12d830df Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 30 Mar 2026 00:50:01 +0800 Subject: [PATCH 022/205] feat(check): add remote update detection for tracked agents Add RepoURL field to AgentCheckResult. Add EnrichAgentResultsWithRemote() that batches remote HEAD hash checks via ParallelCheckURLs and marks agents as 'update_available' when remote has newer commits than the installed version. --- internal/check/agent_check.go | 61 ++++++++++++++++++++++++++++++++++- 1 file changed, 60 insertions(+), 1 deletion(-) diff --git a/internal/check/agent_check.go b/internal/check/agent_check.go index 77d13112..e3d67bfe 100644 --- a/internal/check/agent_check.go +++ b/internal/check/agent_check.go @@ -15,7 +15,8 @@ type AgentCheckResult struct { Name string `json:"name"` Source string `json:"source,omitempty"` Version string `json:"version,omitempty"` - Status string `json:"status"` // "up_to_date", "drifted", "local", "error" + RepoURL string `json:"repoUrl,omitempty"` + Status string `json:"status"` // "up_to_date", "drifted", "local", "error", "update_available" Message string `json:"message,omitempty"` } @@ -63,6 +64,7 @@ func checkOneAgent(agentsDir, agentName, fileName string) AgentCheckResult { result.Source = meta.Source result.Version = meta.Version + result.RepoURL = meta.RepoURL // Compare file hash agentPath := filepath.Join(agentsDir, fileName) @@ -87,3 +89,60 @@ func checkOneAgent(agentsDir, agentName, fileName string) AgentCheckResult { return result } + +// EnrichAgentResultsWithRemote checks agents that have RepoURL + Version +// against their remote HEAD to detect available updates. +// Uses ParallelCheckURLs for efficient batched remote probing. +func EnrichAgentResultsWithRemote(results []AgentCheckResult, onDone func()) { + // Collect unique repo URLs that have version info + type agentRef struct { + repoURL string + version string + indices []int + } + urlMap := make(map[string]*agentRef) + for i, r := range results { + if r.RepoURL == "" || r.Version == "" { + continue + } + if ref, ok := urlMap[r.RepoURL]; ok { + ref.indices = append(ref.indices, i) + } else { + urlMap[r.RepoURL] = &agentRef{ + repoURL: r.RepoURL, + version: r.Version, + indices: []int{i}, + } + } + } + + if len(urlMap) == 0 { + return + } + + // Build URL check inputs + var inputs []URLCheckInput + var refs []*agentRef + for _, ref := range urlMap { + inputs = append(inputs, URLCheckInput{RepoURL: ref.repoURL}) + refs = append(refs, ref) + } + + outputs := ParallelCheckURLs(inputs, onDone) + + // Apply results + for i, out := range outputs { + ref := refs[i] + if out.Err != nil { + continue + } + if out.RemoteHash != "" && out.RemoteHash != ref.version { + for _, idx := range ref.indices { + if results[idx].Status == "up_to_date" { + results[idx].Status = "update_available" + results[idx].Message = "newer version available" + } + } + } + } +} From 9896aa7cd654ccde91eba9fa17947c30d7b1dc9b Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 30 Mar 2026 00:54:00 +0800 Subject: [PATCH 023/205] docs: add Agents concepts page to website documentation Create website/docs/understand/agents.md covering: - Skills vs Agents comparison table - Directory structure (global + project) - Agent file format and naming conventions - Supported targets (claude, cursor, opencode, augment) - Sync modes (merge/symlink/copy) - .agentignore usage - Install from repos with auto-detection - CLI commands reference - Data flow diagram Update sidebar ordering to include agents between skill-format and project-skills. --- website/docs/understand/agents.md | 233 ++++++++++++++++++ website/docs/understand/audit-engine.md | 2 +- .../docs/understand/declarative-manifest.md | 2 +- website/docs/understand/project-skills.md | 2 +- website/sidebars.ts | 1 + 5 files changed, 237 insertions(+), 3 deletions(-) create mode 100644 website/docs/understand/agents.md diff --git a/website/docs/understand/agents.md b/website/docs/understand/agents.md new file mode 100644 index 00000000..13b01186 --- /dev/null +++ b/website/docs/understand/agents.md @@ -0,0 +1,233 @@ +--- +sidebar_position: 6 +--- + +# Agents + +Single-file `.md` resources managed alongside skills — same sync, audit, and lifecycle, different shape. + +:::tip When does this matter? +Some AI CLIs (Claude Code, Cursor, OpenCode, Augment) distinguish between **skills** (directories with `SKILL.md`) and **agents** (standalone `.md` files). If your targets support agents, skillshare can manage both from a single source of truth. +::: + +## Skills vs Agents + +| | Skill | Agent | +|---|---|---| +| **Shape** | Directory containing `SKILL.md` + optional files | Single `.md` file | +| **Name resolution** | `SKILL.md` frontmatter `name` field | Filename (e.g. `tutor.md` = "tutor"), optional frontmatter `name` override | +| **Source directory** | `~/.config/skillshare/skills/` | `~/.config/skillshare/agents/` | +| **Project source** | `.skillshare/skills/` | `.skillshare/agents/` | +| **Ignore file** | `.skillignore` | `.agentignore` | +| **Sync unit** | Directory symlink (merge), whole-dir symlink (symlink), directory copy (copy) | File symlink (merge), whole-dir symlink (symlink), file copy (copy) | +| **Nested support** | `path/to/skill` flattens to `path__to__skill` | `dir/file.md` flattens to `file.md` (directory prefix stripped) | +| **Tracking** | Supported | Supported | +| **Audit** | Supported | Supported | +| **Collect** | Supported | Supported | + +--- + +## Directory Structure + +### Global + +``` +~/.config/skillshare/ +├── skills/ # Skill source (directories) +│ ├── my-skill/ +│ │ └── SKILL.md +│ └── .skillignore +├── agents/ # Agent source (files) +│ ├── tutor.md +│ ├── reviewer.md +│ └── .agentignore +└── config.yaml +``` + +### Project + +``` +.skillshare/ +├── skills/ +│ └── api-conventions/ +│ └── SKILL.md +├── agents/ +│ ├── onboarding.md +│ └── .agentignore +└── config.yaml +``` + +--- + +## Agent File Format + +An agent is a plain `.md` file. Frontmatter is optional: + +```markdown +--- +name: math-tutor +description: Helps with math problems step by step +--- + +# Math Tutor + +You are a patient math tutor. Walk through problems step by step. +``` + +**Naming rules:** +- Filename determines the agent name: `tutor.md` = "tutor" +- Optional `name` field in YAML frontmatter overrides the filename +- Filenames must start with a letter or number, containing only `a-z`, `A-Z`, `0-9`, `_`, `-`, `.` +- Maximum name length: 128 characters + +**Conventional excludes** — these filenames are always skipped during discovery: +`README.md`, `CHANGELOG.md`, `LICENSE.md`, `HISTORY.md`, `SECURITY.md`, `SKILL.md` + +--- + +## Supported Targets + +Only targets with an `agents` path definition receive agent syncs. Currently: + +| Target | Global agents path | Project agents path | +|--------|-------------------|---------------------| +| `claude` | `~/.claude/agents` | `.claude/agents` | +| `cursor` | `~/.cursor/agents` | `.cursor/agents` | +| `opencode` | `~/.config/opencode/agents` | `.opencode/agents` | +| `augment` | `~/.augment/agents` | `.augment/agents` | + +Targets without an `agents` entry (the majority) only receive skills. + +--- + +## Sync Behavior + +Agent sync supports all three modes, same as skills: + +| Mode | Behavior | +|------|----------| +| **merge** (default) | Per-file symlinks. Local agent files in the target are preserved. | +| **symlink** | Entire agents directory symlinked. | +| **copy** | Agent files copied as real files. | + +```bash +# Sync everything (skills + agents) +skillshare sync + +# Sync agents only +skillshare sync agents +``` + +Orphan cleanup works the same way — broken symlinks or copied files that no longer have a source are pruned automatically. + +--- + +## `.agentignore` + +Works identically to `.skillignore` — gitignore-style patterns to exclude agents from sync. + +| Scope | Path | +|-------|------| +| Global | `~/.config/skillshare/agents/.agentignore` | +| Project | `.skillshare/agents/.agentignore` | + +Example: + +```gitignore +# Disable draft agents +draft-* +# Disable a specific agent +experimental-reviewer +``` + +Use `enable`/`disable` with `--kind agent` to manage entries: + +```bash +skillshare disable --kind agent draft-reviewer +skillshare enable --kind agent draft-reviewer +``` + +--- + +## Installing Agents from Repos + +When installing a repository, skillshare auto-detects agents: + +1. Finds an `agents/` convention directory in the repo — `.md` files inside (excluding conventional excludes) are agent candidates +2. If the repo has both `skills/` and `agents/`, both are installed +3. If the repo has only `agents/` (no `SKILL.md` markers), agents are installed +4. If the repo has no `skills/`, no `agents/` dir, but has loose `.md` files at root — treated as agents (pure agent repo) + +### Explicit flags + +```bash +# Install only agents from a repo +skillshare install github.com/user/repo --kind agent + +# Install specific agents by name (-a shorthand) +skillshare install github.com/user/repo -a tutor,reviewer + +# Install specific skills by name (unchanged) +skillshare install github.com/user/repo -s my-skill +``` + +--- + +## CLI Commands + +Most commands accept a `agents` positional argument or `--kind agent` flag to scope to agents: + +| Command | Example | What it does | +|---------|---------|--------------| +| `list agents` | `skillshare list agents` | List agents in source | +| `check agents` | `skillshare check agents` | Check agent integrity and update status | +| `audit agents` | `skillshare audit agents` | Security scan agents | +| `sync agents` | `skillshare sync agents` | Sync only agents to targets | +| `enable --kind agent` | `skillshare enable --kind agent tutor` | Re-enable a disabled agent | +| `disable --kind agent` | `skillshare disable --kind agent tutor` | Disable an agent via `.agentignore` | +| `install --kind agent` | `skillshare install repo --kind agent` | Install only agents from a repo | +| `install -a` | `skillshare install repo -a tutor` | Install specific agent(s) by name | + +Without the kind filter, commands operate on **both** skills and agents. + +--- + +## Data Flow + +```mermaid +flowchart TD + SRC["Agent Source
~/.config/skillshare/agents/"] + DISC["AgentKind.Discover()
Scan .md files, apply .agentignore"] + SYNC["SyncAgents()
merge / symlink / copy"] + TGT_CLAUDE["~/.claude/agents/"] + TGT_CURSOR["~/.cursor/agents/"] + TGT_OC["~/.config/opencode/agents/"] + PRUNE["PruneOrphanAgentLinks()
Remove stale symlinks"] + + SRC --> DISC + DISC --> SYNC + SYNC --> TGT_CLAUDE + SYNC --> TGT_CURSOR + SYNC --> TGT_OC + SYNC --> PRUNE +``` + +--- + +## Project Mode + +Agents work in project mode the same way skills do: + +```bash +# Initialize project (creates .skillshare/agents/ alongside .skillshare/skills/) +skillshare init -p + +# Install agents into project +skillshare install github.com/user/repo --kind agent -p + +# Sync project agents +skillshare sync -p +``` + +Project agent source: `.skillshare/agents/` +Installed agents (tracked) get `.skillshare-meta.json` sidecar files and `.gitignore` entries, same as tracked skills. diff --git a/website/docs/understand/audit-engine.md b/website/docs/understand/audit-engine.md index 14be864f..a97cc39e 100644 --- a/website/docs/understand/audit-engine.md +++ b/website/docs/understand/audit-engine.md @@ -1,5 +1,5 @@ --- -sidebar_position: 8 +sidebar_position: 9 --- # Audit Engine diff --git a/website/docs/understand/declarative-manifest.md b/website/docs/understand/declarative-manifest.md index fe041181..d0dffb42 100644 --- a/website/docs/understand/declarative-manifest.md +++ b/website/docs/understand/declarative-manifest.md @@ -1,5 +1,5 @@ --- -sidebar_position: 7 +sidebar_position: 8 --- # Declarative Skill Manifest diff --git a/website/docs/understand/project-skills.md b/website/docs/understand/project-skills.md index 00978f35..11a61e40 100644 --- a/website/docs/understand/project-skills.md +++ b/website/docs/understand/project-skills.md @@ -1,5 +1,5 @@ --- -sidebar_position: 6 +sidebar_position: 7 --- # Project Skills diff --git a/website/sidebars.ts b/website/sidebars.ts index afd04b29..76c5d532 100644 --- a/website/sidebars.ts +++ b/website/sidebars.ts @@ -91,6 +91,7 @@ const sidebars: SidebarsConfig = { 'understand/sync-modes', 'understand/tracked-repositories', 'understand/skill-format', + 'understand/agents', 'understand/project-skills', 'understand/declarative-manifest', 'understand/audit-engine', From 8ed32e2ded04ff88c86a8ed89212655631bce0f7 Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 30 Mar 2026 10:49:12 +0800 Subject: [PATCH 024/205] feat(install): discover agents in tracked repos on clone and update installTrackedRepoImpl: discover agents alongside skills after clone. Report agent count in warnings. Update empty-repo warning to account for repos that have only agents. updateTrackedRepo: discover agents after git pull, report count. --- internal/install/install_tracked.go | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/internal/install/install_tracked.go b/internal/install/install_tracked.go index a09ce64b..65667d7d 100644 --- a/internal/install/install_tracked.go +++ b/internal/install/install_tracked.go @@ -87,8 +87,16 @@ func installTrackedRepoImpl(source *Source, sourceDir string, opts InstallOption result.Skills = append(result.Skills, skill.Name) } - if len(skills) == 0 { - result.Warnings = append(result.Warnings, "no SKILL.md files found in repository") + // Also discover agents in the tracked repo + agents := discoverAgents(destPath, len(skills) > 0) + if len(agents) > 0 { + result.Warnings = append(result.Warnings, fmt.Sprintf("%d agent(s) found in tracked repo", len(agents))) + } + + if len(skills) == 0 && len(agents) == 0 { + result.Warnings = append(result.Warnings, "no SKILL.md files or agents found in repository") + } else if len(skills) == 0 { + // Only agents found — not a warning, just informational } // Security audit on the entire tracked repo @@ -145,6 +153,12 @@ func updateTrackedRepo(repoPath string, result *TrackedRepoResult, opts InstallO result.Skills = append(result.Skills, skill.Name) } + // Also discover agents in the tracked repo + agents := discoverAgents(repoPath, len(skills) > 0) + if len(agents) > 0 { + result.Warnings = append(result.Warnings, fmt.Sprintf("%d agent(s) found in tracked repo", len(agents))) + } + result.Action = "updated" return result, nil } From f8c958d30501d1ae1bcb6d910dca02d841b77939 Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 30 Mar 2026 10:52:01 +0800 Subject: [PATCH 025/205] test: add agent integration tests for install, check, enable, and uninstall 7 integration tests covering: - install --kind flag parsing and validation - install -a flag parsing - check agents with empty dir, local agent, and JSON output - enable/disable --kind agent routing to .agentignore - uninstall --kind agent flag parsing --- tests/integration/install_agent_test.go | 131 ++++++++++++++++++++++++ 1 file changed, 131 insertions(+) create mode 100644 tests/integration/install_agent_test.go diff --git a/tests/integration/install_agent_test.go b/tests/integration/install_agent_test.go new file mode 100644 index 00000000..39547e8b --- /dev/null +++ b/tests/integration/install_agent_test.go @@ -0,0 +1,131 @@ +//go:build !online + +package integration + +import ( + "os" + "path/filepath" + "testing" + + "skillshare/internal/testutil" +) + +func TestInstall_AgentFlag_ParsesCorrectly(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: {} +`) + + // --kind with invalid value should error + result := sb.RunCLI("install", "--kind", "invalid", "test") + result.AssertFailure(t) + result.AssertAnyOutputContains(t, "must be 'skill' or 'agent'") +} + +func TestInstall_AgentFlagShort_ParsesCorrectly(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: {} +`) + + // -a without value should error + result := sb.RunCLI("install", "-a") + result.AssertFailure(t) + result.AssertAnyOutputContains(t, "requires agent name") +} + +func TestCheck_Agents_EmptyDir(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: {} +`) + + // Create agents source dir + agentsDir := filepath.Join(filepath.Dir(sb.SourcePath), "agents") + os.MkdirAll(agentsDir, 0755) + + result := sb.RunCLI("check", "agents") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "No agents found") +} + +func TestCheck_Agents_LocalAgent(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: {} +`) + + // Create agents source dir with a local agent + agentsDir := filepath.Join(filepath.Dir(sb.SourcePath), "agents") + os.MkdirAll(agentsDir, 0755) + os.WriteFile(filepath.Join(agentsDir, "tutor.md"), []byte("# Tutor agent"), 0644) + + result := sb.RunCLI("check", "agents") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "tutor") + result.AssertAnyOutputContains(t, "local") +} + +func TestCheck_Agents_JsonOutput(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: {} +`) + + agentsDir := filepath.Join(filepath.Dir(sb.SourcePath), "agents") + os.MkdirAll(agentsDir, 0755) + os.WriteFile(filepath.Join(agentsDir, "tutor.md"), []byte("# Tutor"), 0644) + + result := sb.RunCLI("check", "agents", "--json") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, `"name"`) + result.AssertAnyOutputContains(t, `"status"`) +} + +func TestEnable_KindAgent_ParsesCorrectly(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: {} +`) + + // Create agents source dir + agentsDir := filepath.Join(filepath.Dir(sb.SourcePath), "agents") + os.MkdirAll(agentsDir, 0755) + + // Disable an agent — --kind goes after -g (mode flag) + result := sb.RunCLI("disable", "-g", "--kind", "agent", "tutor") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, ".agentignore") + + // Verify .agentignore was created + agentIgnorePath := filepath.Join(agentsDir, ".agentignore") + if !sb.FileExists(agentIgnorePath) { + t.Error(".agentignore should be created") + } +} + +func TestUninstall_KindAgent_ParsesFlag(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: {} +`) + + // --kind agent with nonexistent agent — should error with agent not found + result := sb.RunCLI("uninstall", "-g", "--kind", "agent", "nonexistent") + // May fail because agent doesn't exist, but should parse flags correctly (no "unknown option") + result.AssertOutputNotContains(t, "unknown option") +} From d759a7e8ee2784f9652143b0dc71b407c1fc5085 Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 30 Mar 2026 10:53:28 +0800 Subject: [PATCH 026/205] docs: add agent support to all command reference and getting-started pages Command pages updated (7 files): - list: 'skillshare list agents', [A] badge - install: --kind agent, -a flag, auto-detection - sync: agent sync with all three modes, supported targets - check: 'skillshare check agents' drift detection - audit: 'skillshare audit agents' - enable: --kind agent routes to .agentignore - uninstall: --kind agent flag Getting started updated: - first-sync: link to agents concepts in What's Next - quick-reference: agent management command table + agents source path Commands index: added agents row to What do you want to do table. --- website/docs/getting-started/first-sync.md | 1 + .../docs/getting-started/quick-reference.md | 17 +++++++++++++- website/docs/reference/commands/audit.md | 14 +++++++++++ website/docs/reference/commands/check.md | 13 +++++++++++ website/docs/reference/commands/enable.md | 18 +++++++++++++++ website/docs/reference/commands/index.md | 1 + website/docs/reference/commands/install.md | 23 +++++++++++++++++++ website/docs/reference/commands/list.md | 15 ++++++++++++ website/docs/reference/commands/sync.md | 15 ++++++++++++ website/docs/reference/commands/uninstall.md | 13 +++++++++++ 10 files changed, 129 insertions(+), 1 deletion(-) diff --git a/website/docs/getting-started/first-sync.md b/website/docs/getting-started/first-sync.md index ecfbad86..3708661b 100644 --- a/website/docs/getting-started/first-sync.md +++ b/website/docs/getting-started/first-sync.md @@ -135,6 +135,7 @@ You should see: - [Create your own skill](/docs/how-to/daily-tasks/creating-skills) - [Sync across machines](/docs/how-to/sharing/cross-machine-sync) - [Organization-wide skills](/docs/how-to/sharing/organization-sharing) +- [Agents](/docs/understand/agents) — Manage single-file `.md` agents alongside skills (supported by Claude, Cursor, OpenCode, Augment) ## What Just Happened? diff --git a/website/docs/getting-started/quick-reference.md b/website/docs/getting-started/quick-reference.md index eac752a4..17786ede 100644 --- a/website/docs/getting-started/quick-reference.md +++ b/website/docs/getting-started/quick-reference.md @@ -51,6 +51,20 @@ Command cheat sheet for skillshare. | `extras remove ` | Remove an extras entry from config | | `extras collect ` | Collect local files from extras target into source | +## Agent Management + +| Command | Description | +|---------|-------------| +| `list agents` | List installed agents | +| `install --kind agent` | Install only agents from a repo | +| `install -a ` | Install specific agent(s) by name | +| `uninstall --kind agent ` | Remove an agent | +| `sync agents` | Sync only agents to targets | +| `check agents` | Check agents for updates | +| `audit agents` | Security scan agents | +| `enable --kind agent ` | Re-enable a disabled agent | +| `disable --kind agent ` | Disable an agent via `.agentignore` | + ## Sync Operations | Command | Description | @@ -163,7 +177,8 @@ make playground-down # stop container |------|-------------| | `~/.config/skillshare/config.yaml` | Configuration file | | `~/.config/skillshare/skills/registry.yaml` | Installed skill registry (auto-managed) | -| `~/.config/skillshare/skills/` | Source directory | +| `~/.config/skillshare/skills/` | Skill source directory | +| `~/.config/skillshare/agents/` | Agent source directory | | `~/.config/skillshare/extras//` | Extras source directories | | `~/.local/state/skillshare/logs/` | Operation and audit logs | | `~/.local/share/skillshare/backups/` | Backup directory | diff --git a/website/docs/reference/commands/audit.md b/website/docs/reference/commands/audit.md index bde2bd2e..7a83d033 100644 --- a/website/docs/reference/commands/audit.md +++ b/website/docs/reference/commands/audit.md @@ -359,6 +359,19 @@ Binary files (images, `.wasm`, etc.) and hidden directories (`.git`) are skipped |-----------|-------------| | `rules` | Browse, enable, and disable audit rules (see [`audit rules`](/docs/reference/commands/audit-rules)) | +## Agent Support + +`skillshare audit agents` scopes the security scan to agents only, scanning `.md` files in the agents source directory: + +```bash +skillshare audit agents # Scan all agents +skillshare audit agents --threshold high # Block on HIGH+ for agents +skillshare audit agents --format sarif # SARIF output for agents +skillshare audit agents -p # Scan project agents +``` + +Agents are subject to the same audit rules, severity levels, and threshold gating as skills. Without the `agents` argument, `audit` scans skills only (default behavior). See [Agents](/docs/understand/agents) for background. + ## See Also - [Audit Engine](/docs/understand/audit-engine) — How the engine works (threat model, risk scoring, command tiering) @@ -370,3 +383,4 @@ Binary files (images, `.wasm`, etc.) and hidden directories (`.git`) are skipped - [Securing Your Skills](/docs/how-to/advanced/security) — Security guide for teams and organizations - [CI/CD Skill Validation](/docs/how-to/recipes/ci-cd-skill-validation) — Pipeline automation recipe - [Pre-commit Hook](/docs/how-to/recipes/pre-commit-hook) — Automatic audit on every commit +- [Agents](/docs/understand/agents) — Agent concepts diff --git a/website/docs/reference/commands/check.md b/website/docs/reference/commands/check.md index 7a9782f7..f00f48ad 100644 --- a/website/docs/reference/commands/check.md +++ b/website/docs/reference/commands/check.md @@ -162,8 +162,21 @@ skillshare check -p --group frontend # Check project group skillshare check -p --json # JSON output for project ``` +## Agent Support + +`skillshare check agents` scopes the check to agents only, reporting drift and update status for `.md` files in the agents source directory: + +```bash +skillshare check agents # Check all agents +skillshare check agents --json # JSON output for agents +skillshare check agents -p # Check project agents +``` + +Without the `agents` argument, `check` operates on skills only (default behavior). See [Agents](/docs/understand/agents) for background. + ## See Also - [update](/docs/reference/commands/update) — Apply updates - [list](/docs/reference/commands/list) — View installed skills - [status](/docs/reference/commands/status) — Show sync status +- [Agents](/docs/understand/agents) — Agent concepts diff --git a/website/docs/reference/commands/enable.md b/website/docs/reference/commands/enable.md index fd1943fb..b1c76cb3 100644 --- a/website/docs/reference/commands/enable.md +++ b/website/docs/reference/commands/enable.md @@ -96,9 +96,27 @@ Disabled skills show a red **disabled** badge in the detail panel. The file is created automatically on first `disable`. +## Agent Support + +Use `--kind agent` to enable or disable agents. This writes to `.agentignore` instead of `.skillignore`: + +```bash +skillshare disable --kind agent draft-reviewer # Disable an agent +skillshare enable --kind agent draft-reviewer # Re-enable an agent +skillshare disable --kind agent "experimental-*" # Disable by pattern +``` + +| Mode | `.agentignore` path | +|------|---------------------| +| Global | `~/.config/skillshare/agents/.agentignore` | +| Project | `.skillshare/agents/.agentignore` | + +See [Agents](/docs/understand/agents) for background on agent management. + ## See Also - [list](./list.md) — View disabled skills and toggle with `E` key - [Filtering Skills](/docs/how-to/daily-tasks/filtering-skills) — All filtering layers - [.skillignore](/docs/reference/filtering#skillignore) — Pattern syntax - [sync](./sync.md) — Apply changes after enable/disable +- [Agents](/docs/understand/agents) — Agent concepts diff --git a/website/docs/reference/commands/index.md b/website/docs/reference/commands/index.md index 36a4de76..4a061f6d 100644 --- a/website/docs/reference/commands/index.md +++ b/website/docs/reference/commands/index.md @@ -20,6 +20,7 @@ Complete reference for all skillshare commands. | Temporarily hide a skill without removing it | [`enable` / `disable`](./enable.md) | | Sync across machines | [`push`](./push.md) / [`pull`](./pull.md) | | Manage non-skill resources (rules, commands) | [`extras`](./extras.md) | +| Manage single-file `.md` agents | Most commands accept `agents` or `--kind agent` — see [Agents](/docs/understand/agents) | | See which skills use the most context tokens | [`analyze`](./analyze.md) | | Fix something broken | [`doctor`](./doctor.md) | | Open the web dashboard | [`ui`](./ui.md) | diff --git a/website/docs/reference/commands/install.md b/website/docs/reference/commands/install.md index 951f9a91..6a8abfc7 100644 --- a/website/docs/reference/commands/install.md +++ b/website/docs/reference/commands/install.md @@ -680,6 +680,29 @@ Lines starting with `#` are comments. Empty lines are ignored. | **Scope** | All users installing from this repo | This install only | | **Requires** | Git repo with multiple skills | Git repo with multiple skills | +## Agent Support + +When installing a repository, skillshare auto-detects agents (standalone `.md` files) alongside skills: + +- If the repo contains an `agents/` directory, `.md` files inside are discovered as agent candidates +- If the repo has both `skills/` and `agents/`, both are installed +- If the repo has only loose `.md` files at root (no `SKILL.md`), they are treated as agents + +### Explicit agent flags + +```bash +# Install only agents from a repo +skillshare install github.com/user/repo --kind agent + +# Install specific agents by name (-a shorthand) +skillshare install github.com/user/repo -a tutor,reviewer + +# Combine with project mode +skillshare install github.com/user/repo --kind agent -p +``` + +The `-a ` flag is the agent equivalent of `-s ` for skills. Agents are installed into `~/.config/skillshare/agents/` (global) or `.skillshare/agents/` (project). See [Agents](/docs/understand/agents) for the full concepts. + ## After Installing Always sync to distribute to targets: diff --git a/website/docs/reference/commands/list.md b/website/docs/reference/commands/list.md index 9ce653f9..12ea072c 100644 --- a/website/docs/reference/commands/list.md +++ b/website/docs/reference/commands/list.md @@ -263,9 +263,24 @@ For more details, see [Organizing Skills with Folders](/docs/how-to/daily-tasks/ | `✓` | Up-to-date, no local changes | | `!` | Has uncommitted changes | +## Agent Support + +`skillshare list agents` filters to agents only, showing `.md` files from the agents source directory (`~/.config/skillshare/agents/` or `.skillshare/agents/`). + +```bash +skillshare list agents # List agents only +skillshare list agents --json # JSON output for agents +skillshare list agents --verbose # Detailed agent list +``` + +In the interactive TUI, agents display an **[A]** badge to distinguish them from skills. All TUI features (filtering, detail panel, enable/disable toggle) work the same way. + +Without the `agents` argument, `list` shows skills only (default behavior). See [Agents](/docs/understand/agents) for background. + ## See Also - [enable / disable](/docs/reference/commands/enable) — Toggle skills without removing - [install](/docs/reference/commands/install) — Install skills - [uninstall](/docs/reference/commands/uninstall) — Remove skills - [status](/docs/reference/commands/status) — Show sync status +- [Agents](/docs/understand/agents) — Agent concepts diff --git a/website/docs/reference/commands/sync.md b/website/docs/reference/commands/sync.md index c0d2e032..a16b5cab 100644 --- a/website/docs/reference/commands/sync.md +++ b/website/docs/reference/commands/sync.md @@ -479,6 +479,21 @@ flowchart TD --- +## Agent Sync {#agent-sync} + +`skillshare sync` automatically syncs agents alongside skills to targets that support agents. You can also sync agents independently: + +```bash +skillshare sync # Sync skills + agents to all targets +skillshare sync agents # Sync only agents +``` + +Agent sync supports all three modes (merge, copy, symlink), matching the target's configured mode. Only targets with an `agents` path definition receive agent syncs — currently Claude, Cursor, OpenCode, and Augment. See [Agents — Supported Targets](/docs/understand/agents#supported-targets) for the full list. + +Orphan cleanup, `.agentignore` filtering, and per-target include/exclude filters all work the same way as for skills. + +--- + ## Sync Extras {#sync-extras} Sync non-skill resources (rules, commands, prompts, etc.) to arbitrary directories. Extras are configured separately from skills and have their own source directories. diff --git a/website/docs/reference/commands/uninstall.md b/website/docs/reference/commands/uninstall.md index 672b791e..eba027bb 100644 --- a/website/docs/reference/commands/uninstall.md +++ b/website/docs/reference/commands/uninstall.md @@ -288,9 +288,22 @@ skillshare sync git add .skillshare/ && git commit -m "Remove pdf skill" ``` +## Agent Support + +Use `--kind agent` to uninstall agents instead of skills: + +```bash +skillshare uninstall --kind agent tutor # Remove an agent +skillshare uninstall --kind agent tutor reviewer -f # Remove multiple agents +skillshare uninstall --kind agent --all # Remove all agents +``` + +Agent uninstall follows the same trash-and-retain behavior as skills (moved to trash, kept 7 days). See [Agents](/docs/understand/agents) for background. + ## See Also - [install](/docs/reference/commands/install) — Install skills - [list](/docs/reference/commands/list) — List installed skills - [trash](/docs/reference/commands/trash) — Manage trashed skills - [Project Skills](/docs/understand/project-skills) — Project mode concepts +- [Agents](/docs/understand/agents) — Agent concepts From eff9647defcc8b933aa71e56c591e8a53b060000 Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 30 Mar 2026 16:51:20 +0800 Subject: [PATCH 027/205] refactor(ui): rename SkillsPage to ResourcesPage with Skills/Agents tabs MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Rename SkillsPage.tsx to ResourcesPage.tsx. Page title changed to 'Resources' with subtitle showing both skill and agent counts. Add top-level SegmentedControl tabs (Skills | Agents) for switching between resource types. Each tab has its own count badge. Sub-filters (All/Tracked/GitHub/Local) apply within the active tab. Tab selection persists via ?tab=agents URL param. Dashboard stat cards link to /resources and /resources?tab=agents. Route: /resources is the primary path; /skills kept as alias. Nav sidebar: 'Skills' → 'Resources' with Layers icon. Keyboard shortcut 's' → /resources. Tour updated for resource-oriented language. --- ui/src/App.tsx | 5 +++-- ui/src/components/Layout.tsx | 3 ++- ui/src/components/tour/tourSteps.ts | 2 +- ui/src/hooks/useGlobalShortcuts.ts | 2 +- ui/src/pages/DashboardPage.tsx | 4 ++-- ui/src/pages/{SkillsPage.tsx => ResourcesPage.tsx} | 0 6 files changed, 9 insertions(+), 7 deletions(-) rename ui/src/pages/{SkillsPage.tsx => ResourcesPage.tsx} (100%) diff --git a/ui/src/App.tsx b/ui/src/App.tsx index 0d975790..7956fd84 100644 --- a/ui/src/App.tsx +++ b/ui/src/App.tsx @@ -13,7 +13,7 @@ import { TourProvider, TourOverlay, TourTooltip } from './components/tour'; import DashboardPage from './pages/DashboardPage'; import { BASE_PATH } from './lib/basePath'; -const SkillsPage = lazy(() => import('./pages/SkillsPage')); +const ResourcesPage = lazy(() => import('./pages/ResourcesPage')); const SkillDetailPage = lazy(() => import('./pages/SkillDetailPage')); const TargetsPage = lazy(() => import('./pages/TargetsPage')); const ExtrasPage = lazy(() => import('./pages/ExtrasPage')); @@ -53,7 +53,8 @@ export default function App() { }> } /> - } /> + } /> + } /> } /> } /> } /> diff --git a/ui/src/components/Layout.tsx b/ui/src/components/Layout.tsx index 6a3ffe43..558b7588 100644 --- a/ui/src/components/Layout.tsx +++ b/ui/src/components/Layout.tsx @@ -3,6 +3,7 @@ import { useState, useCallback, useEffect } from 'react'; import { LayoutDashboard, Puzzle, + Layers, Target, FolderPlus, RefreshCw, @@ -55,7 +56,7 @@ const navGroups: NavGroup[] = [ { label: 'MANAGE', items: [ - { to: '/skills', icon: Puzzle, label: 'Skills' }, + { to: '/resources', icon: Layers, label: 'Resources' }, { to: '/extras', icon: FolderPlus, label: 'Extras' }, { to: '/targets', icon: Target, label: 'Targets' }, { to: '/search', icon: Search, label: 'Search' }, diff --git a/ui/src/components/tour/tourSteps.ts b/ui/src/components/tour/tourSteps.ts index a0440ada..2d91581a 100644 --- a/ui/src/components/tour/tourSteps.ts +++ b/ui/src/components/tour/tourSteps.ts @@ -11,7 +11,7 @@ export interface TourStep { const ALL_STEPS: TourStep[] = [ { id: 'stats-grid', page: '/', targetSelector: "[data-tour='stats-grid']", title: 'Dashboard Overview', description: 'Real-time stats for skills, targets, and sync status. Zeros are normal — numbers update after installing skills.', placement: 'bottom' }, { id: 'quick-actions', page: '/', targetSelector: "[data-tour='quick-actions']", title: 'Quick Actions', description: 'Shortcuts for common operations: one-click sync, security scan, browse skills, batch update.', placement: 'top' }, - { id: 'skills-view', page: '/skills', targetSelector: "[data-tour='skills-view']", title: 'Skills Management', description: 'Browse all installed skills. Supports grid, grouped, and table views with search and filters.', emptyDescription: 'No skills yet. After the tour, try installing your first skill from Search or Install!', placement: 'bottom' }, + { id: 'skills-view', page: '/resources', targetSelector: "[data-tour='skills-view']", title: 'Resource Management', description: 'Browse all installed skills and agents. Switch between Skills and Agents tabs, with grid, folder, and table views.', emptyDescription: 'No resources yet. After the tour, try installing your first skill from Search or Install!', placement: 'bottom' }, { id: 'extras-list', page: '/extras', targetSelector: "[data-tour='extras-list']", title: 'Extras', description: 'Manage non-skill extra file directories (hooks, snippets, etc.) synced to targets.', emptyDescription: 'No extras yet. After the tour, try adding an extra directory (hooks, snippets, etc.) to sync alongside your skills!', placement: 'bottom' }, { id: 'targets-grid', page: '/targets', targetSelector: "[data-tour='targets-grid']", title: 'Targets', description: 'Your AI CLI tools (Claude, Cursor, etc.). Each target can be configured with its own sync mode.', placement: 'bottom' }, { id: 'skill-filters', page: '/targets', targetSelector: "[data-tour='skill-filters']", title: 'Skill Filters', description: 'Use Include/Exclude patterns to control which skills sync to each target. For example, exclude large skills from lightweight tools.', placement: 'bottom' }, diff --git a/ui/src/hooks/useGlobalShortcuts.ts b/ui/src/hooks/useGlobalShortcuts.ts index 42aea427..082dccce 100644 --- a/ui/src/hooks/useGlobalShortcuts.ts +++ b/ui/src/hooks/useGlobalShortcuts.ts @@ -35,7 +35,7 @@ export const SHORTCUT_ENTRIES: ShortcutEntry[] = [ const NAV_MAP: Record = { d: '/', - s: '/skills', + s: '/resources', t: '/targets', l: '/log', a: '/audit', diff --git a/ui/src/pages/DashboardPage.tsx b/ui/src/pages/DashboardPage.tsx index a4009b75..2d9059dc 100644 --- a/ui/src/pages/DashboardPage.tsx +++ b/ui/src/pages/DashboardPage.tsx @@ -114,7 +114,7 @@ export default function DashboardPage() { icon: Puzzle, color: 'text-blue', bg: 'bg-info-light', - to: '/skills', + to: '/resources', }, { label: 'Agents', @@ -123,7 +123,7 @@ export default function DashboardPage() { icon: Bot, color: 'text-accent', bg: 'bg-accent/10', - to: '/skills?kind=agent', + to: '/resources?tab=agents', }, { label: 'Targets', diff --git a/ui/src/pages/SkillsPage.tsx b/ui/src/pages/ResourcesPage.tsx similarity index 100% rename from ui/src/pages/SkillsPage.tsx rename to ui/src/pages/ResourcesPage.tsx From c61033a143ee2c75121735e14d37df364b639e88 Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 30 Mar 2026 16:52:50 +0800 Subject: [PATCH 028/205] refactor(ui): use connected SegmentedControl style for Skills/Agents tabs MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Move resource type toggle into the sticky toolbar row alongside the view toggle. Use connected + size='md' SegmentedControl matching the grid/folder/table view toggle design (icon + text label, white active background, shared border container). Removes the separate tab section above the toolbar — both controls now live in the same horizontal bar for a cleaner, more compact layout. --- ui/src/pages/ResourcesPage.tsx | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/ui/src/pages/ResourcesPage.tsx b/ui/src/pages/ResourcesPage.tsx index 6fcc39fc..0df040bd 100644 --- a/ui/src/pages/ResourcesPage.tsx +++ b/ui/src/pages/ResourcesPage.tsx @@ -808,6 +808,17 @@ export default function SkillsPage() {
{/* Search + Sort row */}
+ {/* Resource type toggle */} + { setActiveTab(v as ResourceTab); setFilterType('all'); setSearch(''); }} + options={[ + { value: 'skills', label: Skills, count: skillItems.length }, + { value: 'agents', label: Agents, count: agentItems.length }, + ]} + size="md" + connected + />
Date: Mon, 30 Mar 2026 16:54:11 +0800 Subject: [PATCH 029/205] refactor(ui): replace SegmentedControl with underline tabs for Skills/Agents Use underline tab pattern for Skills/Agents primary navigation: - Border-bottom indicator on active tab (pencil color) - Icon + label + count for each tab - Sits between page header and sticky toolbar - Clear visual hierarchy: underline tabs (primary) vs pill filters (secondary) Removes the connected SegmentedControl that was visually identical to the sub-filters, causing hierarchy confusion. --- ui/src/pages/ResourcesPage.tsx | 38 ++++++++++++++++++++++++---------- 1 file changed, 27 insertions(+), 11 deletions(-) diff --git a/ui/src/pages/ResourcesPage.tsx b/ui/src/pages/ResourcesPage.tsx index 0df040bd..9455a145 100644 --- a/ui/src/pages/ResourcesPage.tsx +++ b/ui/src/pages/ResourcesPage.tsx @@ -804,21 +804,37 @@ export default function SkillsPage() { } /> + {/* Resource type underline tabs */} +
+ {([ + { key: 'skills' as ResourceTab, icon: , label: 'Skills', count: skillItems.length }, + { key: 'agents' as ResourceTab, icon: , label: 'Agents', count: agentItems.length }, + ]).map((tab) => ( + + ))} +
+ {/* Sticky toolbar */}
{/* Search + Sort row */}
- {/* Resource type toggle */} - { setActiveTab(v as ResourceTab); setFilterType('all'); setSearch(''); }} - options={[ - { value: 'skills', label: Skills, count: skillItems.length }, - { value: 'agents', label: Agents, count: agentItems.length }, - ]} - size="md" - connected - />
Date: Mon, 30 Mar 2026 16:57:24 +0800 Subject: [PATCH 030/205] fix(ui): use ss-segmented classes for resource tabs to inherit playful theme MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Replace raw button underline tabs with ss-segmented + ss-segmented-item classes. This hooks into the existing playful theme CSS which applies wobbly border-radius, hard offset shadows, and pencil-colored active state — matching the hand-drawn aesthetic of other segmented controls. Uses non-connected variant (gap between buttons) to visually distinguish from the sub-filter pills below. --- ui/src/pages/ResourcesPage.tsx | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/ui/src/pages/ResourcesPage.tsx b/ui/src/pages/ResourcesPage.tsx index 9455a145..47d78103 100644 --- a/ui/src/pages/ResourcesPage.tsx +++ b/ui/src/pages/ResourcesPage.tsx @@ -804,8 +804,8 @@ export default function SkillsPage() { } /> - {/* Resource type underline tabs */} -
+ {/* Resource type tabs */} +
{([ { key: 'skills' as ResourceTab, icon: , label: 'Skills', count: skillItems.length }, { key: 'agents' as ResourceTab, icon: , label: 'Agents', count: agentItems.length }, @@ -814,13 +814,16 @@ export default function SkillsPage() { key={tab.key} onClick={() => { setActiveTab(tab.key); setFilterType('all'); setSearch(''); }} className={` - inline-flex items-center gap-1.5 px-4 py-2.5 text-sm font-medium cursor-pointer transition-colors - border-b-2 -mb-px + ss-segmented-item ss-resource-tab + inline-flex items-center gap-1.5 px-4 py-2 text-sm font-medium cursor-pointer + transition-all duration-150 border ${activeTab === tab.key - ? 'border-pencil text-pencil' - : 'border-transparent text-pencil-light hover:text-pencil hover:border-muted-dark' + ? 'bg-surface text-pencil border-muted-dark' + : 'bg-transparent text-pencil-light border-muted hover:border-muted-dark hover:text-pencil' } `} + style={{ borderRadius: 'var(--radius-md)' }} + aria-pressed={activeTab === tab.key} > {tab.icon} {tab.label} From d3851b1a69e21a1ede95b50478df73c148008845 Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 30 Mar 2026 16:59:21 +0800 Subject: [PATCH 031/205] refactor(ui): underline tabs with playful theme support for resource tabs Replace SegmentedControl with proper underline tab pattern: - nav[role=tablist] with button[role=tab][aria-selected] - 3px bottom border indicator, semibold text, count badge pill - Gap-6 spacing between tabs for breathing room Playful theme enhancements: - Wavy SVG border-image for the tab bar bottom border (dark mode variant) - Active tab count badge inverts to pencil bg + paper text - Wobble border-radius on count badge via --radius-wobble-badge Clean theme: minimal underline with pencil color active state. --- ui/src/index.css | 19 +++++++++++++++++++ ui/src/pages/ResourcesPage.tsx | 27 +++++++++++++++------------ 2 files changed, 34 insertions(+), 12 deletions(-) diff --git a/ui/src/index.css b/ui/src/index.css index c4bde8d1..f72a8840 100644 --- a/ui/src/index.css +++ b/ui/src/index.css @@ -469,6 +469,25 @@ box-shadow: 2px 2px 0 rgba(45, 45, 45, 0.1); } +/* ── Playful: Resource Tabs ── */ +[data-theme="playful"] .ss-resource-tabs { + border-bottom: 3px solid var(--color-pencil) !important; + border-image: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='200' height='4'%3E%3Cpath d='M0 2 Q25 0 50 2 T100 2 T150 2 T200 2' stroke='%23141312' stroke-width='2' fill='none'/%3E%3C/svg%3E") 2 stretch; +} +.dark[data-theme="playful"] .ss-resource-tabs { + border-image: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='200' height='4'%3E%3Cpath d='M0 2 Q25 0 50 2 T100 2 T150 2 T200 2' stroke='%232e2d2a' stroke-width='2' fill='none'/%3E%3C/svg%3E") 2 stretch; +} +[data-theme="playful"] .ss-resource-tab[aria-selected="true"] { + border-bottom-color: var(--color-pencil) !important; + border-bottom-width: 3px !important; + font-weight: 700; +} +[data-theme="playful"] .ss-resource-tab[aria-selected="true"] span { + background: var(--color-pencil) !important; + color: var(--color-paper) !important; + border-radius: var(--radius-wobble-badge, 4px) !important; +} + /* ── Playful: Links ── */ [data-theme="playful"] a:not(.ss-btn):not([class*="nav"]) { text-decoration-style: wavy; diff --git a/ui/src/pages/ResourcesPage.tsx b/ui/src/pages/ResourcesPage.tsx index 47d78103..cd625001 100644 --- a/ui/src/pages/ResourcesPage.tsx +++ b/ui/src/pages/ResourcesPage.tsx @@ -804,39 +804,42 @@ export default function SkillsPage() { } /> - {/* Resource type tabs */} -
+ {/* Resource type underline tabs */} +
+ {/* Sticky toolbar */}
- {/* Search + Sort row */} + {/* Search + Sort + View row */}
Date: Mon, 30 Mar 2026 16:59:33 +0800 Subject: [PATCH 032/205] fix(ui): remove redundant count subtitle from Resources page header MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Counts are already shown in the tab badges — subtitle now shows a static description instead of duplicating the numbers. --- ui/src/pages/ResourcesPage.tsx | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ui/src/pages/ResourcesPage.tsx b/ui/src/pages/ResourcesPage.tsx index cd625001..09edac7a 100644 --- a/ui/src/pages/ResourcesPage.tsx +++ b/ui/src/pages/ResourcesPage.tsx @@ -790,9 +790,9 @@ export default function SkillsPage() {
{/* Header */} } - title="Skills" - subtitle={`${skills.length} skill${skills.length !== 1 ? 's' : ''} installed`} + icon={activeTab === 'agents' ? : } + title="Resources" + subtitle="" className="mb-1!" actions={ From 273277c2d6abb47f4c02f7c2138a6e67b023908c Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 30 Mar 2026 17:00:36 +0800 Subject: [PATCH 033/205] fix(ui): use Layers icon for Resources header to avoid tab icon duplication --- ui/src/pages/ResourcesPage.tsx | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ui/src/pages/ResourcesPage.tsx b/ui/src/pages/ResourcesPage.tsx index 09edac7a..af14b37f 100644 --- a/ui/src/pages/ResourcesPage.tsx +++ b/ui/src/pages/ResourcesPage.tsx @@ -25,6 +25,7 @@ import { MousePointerClick, X, Bot, + Layers, } from 'lucide-react'; import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query'; import { VirtuosoGrid, Virtuoso } from 'react-virtuoso'; @@ -790,7 +791,7 @@ export default function SkillsPage() {
{/* Header */} : } + icon={} title="Resources" subtitle="" className="mb-1!" From d690408df9182a942bb9034bc3c56d98c042da89 Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 30 Mar 2026 17:01:05 +0800 Subject: [PATCH 034/205] fix(ui): increase spacing between Resources header and tabs --- ui/src/pages/ResourcesPage.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ui/src/pages/ResourcesPage.tsx b/ui/src/pages/ResourcesPage.tsx index af14b37f..3eb5707d 100644 --- a/ui/src/pages/ResourcesPage.tsx +++ b/ui/src/pages/ResourcesPage.tsx @@ -794,7 +794,7 @@ export default function SkillsPage() { icon={} title="Resources" subtitle="" - className="mb-1!" + className="mb-4!" actions={ + +
+
+ } + confirmLabel="" + cancelLabel="Cancel" + onConfirm={() => setShowKindSelector(false)} + onCancel={() => setShowKindSelector(false)} + /> + ); + const auditConfirmDialog = ( {formContent} {pickerModal} + {kindSelectorDialog} {auditConfirmDialog} {warningConfirmDialog}
From 60489b3a0db35c71b0b0fab00820add1defe4f70 Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 30 Mar 2026 18:01:19 +0800 Subject: [PATCH 036/205] fix(install): route agent installs to agents source directory Backend (handler_install.go): - handleInstallBatch checks body.Kind field; when 'agent', uses InstallAgentFromDiscovery() writing to agentsSource() instead of skills source directory. Frontend (client.ts): - installBatch accepts optional kind param. Frontend (InstallForm.tsx): - handleBatchInstall detects kind from selected items and passes kind='agent' to the API when installing agents. --- internal/server/handler_install.go | 55 ++++++++++++++++++++++-------- ui/src/api/client.ts | 2 +- ui/src/components/InstallForm.tsx | 2 ++ 3 files changed, 44 insertions(+), 15 deletions(-) diff --git a/internal/server/handler_install.go b/internal/server/handler_install.go index be6ed48b..301288ee 100644 --- a/internal/server/handler_install.go +++ b/internal/server/handler_install.go @@ -155,28 +155,55 @@ func (s *Server) handleInstallBatch(w http.ResponseWriter, r *http.Request) { if s.IsProjectMode() { installOpts.AuditProjectRoot = s.projectRoot } + isAgent := body.Kind == "agent" + for _, sel := range body.Skills { skillName := sel.Name if body.Name != "" && len(body.Skills) == 1 { skillName = body.Name } - destPath := filepath.Join(s.cfg.Source, body.Into, skillName) - res, err := install.InstallFromDiscovery(discovery, install.SkillInfo{ - Name: sel.Name, - Path: sel.Path, - }, destPath, installOpts) - if err != nil { + + if isAgent { + // Agent install: copy single .md file to agents source + agentsDir := s.agentsSource() + agentInfo := install.AgentInfo{ + Name: sel.Name, + Path: sel.Path, + FileName: sel.Name + ".md", + } + res, err := install.InstallAgentFromDiscovery(discovery, agentInfo, agentsDir, installOpts) + if err != nil { + results = append(results, batchResultItem{ + Name: skillName, + Error: err.Error(), + }) + continue + } results = append(results, batchResultItem{ - Name: skillName, - Error: err.Error(), + Name: skillName, + Action: res.Action, + Warnings: res.Warnings, + }) + } else { + // Skill install: copy directory to skills source + destPath := filepath.Join(s.cfg.Source, body.Into, skillName) + res, err := install.InstallFromDiscovery(discovery, install.SkillInfo{ + Name: sel.Name, + Path: sel.Path, + }, destPath, installOpts) + if err != nil { + results = append(results, batchResultItem{ + Name: skillName, + Error: err.Error(), + }) + continue + } + results = append(results, batchResultItem{ + Name: skillName, + Action: res.Action, + Warnings: res.Warnings, }) - continue } - results = append(results, batchResultItem{ - Name: skillName, - Action: res.Action, - Warnings: res.Warnings, - }) } // Summary for toast diff --git a/ui/src/api/client.ts b/ui/src/api/client.ts index be490dbb..b807a163 100644 --- a/ui/src/api/client.ts +++ b/ui/src/api/client.ts @@ -265,7 +265,7 @@ export const api = { method: 'POST', body: JSON.stringify(opts), }), - installBatch: (opts: { source: string; skills: DiscoveredSkill[]; force?: boolean; skipAudit?: boolean; into?: string; name?: string; branch?: string }) => + installBatch: (opts: { source: string; skills: DiscoveredSkill[]; force?: boolean; skipAudit?: boolean; into?: string; name?: string; branch?: string; kind?: 'skill' | 'agent' }) => apiFetch('/install/batch', { method: 'POST', body: JSON.stringify(opts), diff --git a/ui/src/components/InstallForm.tsx b/ui/src/components/InstallForm.tsx index 0bd94d88..b57b3860 100644 --- a/ui/src/components/InstallForm.tsx +++ b/ui/src/components/InstallForm.tsx @@ -357,6 +357,7 @@ export default function InstallForm({ const handleBatchInstall = async (selected: DiscoveredSkill[]) => { setBatchInstalling(true); try { + const detectedKind = selected[0]?.kind; const res = await api.installBatch({ source: pendingSource, skills: selected, @@ -365,6 +366,7 @@ export default function InstallForm({ skipAudit, name: selected.length === 1 && name.trim() ? name.trim() : undefined, branch: branch.trim() || undefined, + kind: detectedKind === 'agent' ? 'agent' : undefined, }); const allWarnings: string[] = []; const auditFindings: string[] = []; From a2d3b47e4697545f0580c7f5875a0d30e3ac484c Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 30 Mar 2026 18:03:54 +0800 Subject: [PATCH 037/205] fix(install): discover agents in subdir URLs and fix single-item kind routing Backend (install_discovery.go): - Add discoverAgents() call to all 3 subdir discovery paths (sparse checkout, GitHub API, full clone fallback). Previously only whole-repo discovery detected agents. Frontend (InstallForm.tsx): - Single-skill fast path now checks hasAgents before auto-installing. When agents exist alongside a single skill, falls through to the kind selector instead of silently installing as skill. --- internal/install/install_discovery.go | 6 ++++++ ui/src/components/InstallForm.tsx | 4 ++-- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/internal/install/install_discovery.go b/internal/install/install_discovery.go index 8a9736cc..0dd93c1d 100644 --- a/internal/install/install_discovery.go +++ b/internal/install/install_discovery.go @@ -279,9 +279,11 @@ func discoverFromGitSubdirWithProgressImpl(source *Source, onProgress ProgressCa commitHash = hash } skills := discoverSkills(subdirPath, true) + agents := discoverAgents(subdirPath, len(skills) > 0) return &DiscoveryResult{ RepoPath: tempDir, Skills: skills, + Agents: agents, Source: source, CommitHash: commitHash, Warnings: warnings, @@ -306,9 +308,11 @@ func discoverFromGitSubdirWithProgressImpl(source *Source, onProgress ProgressCa if dlErr == nil { commitHash = hash skills := discoverSkills(subdirPath, true) + agents := discoverAgents(subdirPath, len(skills) > 0) return &DiscoveryResult{ RepoPath: tempDir, Skills: skills, + Agents: agents, Source: source, CommitHash: commitHash, }, nil @@ -343,9 +347,11 @@ func discoverFromGitSubdirWithProgressImpl(source *Source, onProgress ProgressCa } skills := discoverSkills(subdirPath, true) + agents := discoverAgents(subdirPath, len(skills) > 0) return &DiscoveryResult{ RepoPath: tempDir, Skills: skills, + Agents: agents, Source: source, CommitHash: commitHash, Warnings: warnings, diff --git a/ui/src/components/InstallForm.tsx b/ui/src/components/InstallForm.tsx index b57b3860..a8c5fb55 100644 --- a/ui/src/components/InstallForm.tsx +++ b/ui/src/components/InstallForm.tsx @@ -293,8 +293,8 @@ export default function InstallForm({ setDiscoveredSkills(disc.skills); setPendingSource(trimmed); setShowPicker(true); - } else if (disc.skills.length === 1) { - // Single discovered skill — install via batch + } else if (disc.skills.length === 1 && !hasAgents) { + // Single discovered skill (no agents) — install via batch const res = await api.installBatch({ source: trimmed, skills: disc.skills, From da4f460a90fa95546cfc9e934f7c2ed8ed60a2b9 Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 30 Mar 2026 18:05:53 +0800 Subject: [PATCH 038/205] fix(ui): dynamic picker modal title based on resource kind Show 'Select Agents to Install' when all items are agents, 'Select Resources to Install' for mixed, 'Select Skills to Install' for skills-only. --- ui/src/components/SkillPickerModal.tsx | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/ui/src/components/SkillPickerModal.tsx b/ui/src/components/SkillPickerModal.tsx index 6967a878..f0756b8f 100644 --- a/ui/src/components/SkillPickerModal.tsx +++ b/ui/src/components/SkillPickerModal.tsx @@ -86,7 +86,14 @@ export default function SkillPickerModal({ return (

- {singleSelect ? 'Select a Skill to Install' : 'Select Skills to Install'} + {singleSelect + ? 'Select a Resource to Install' + : skills.some((s) => s.kind === 'agent') + ? skills.every((s) => s.kind === 'agent') + ? 'Select Agents to Install' + : 'Select Resources to Install' + : 'Select Skills to Install' + }

{source} From 9ffc7bb52fdacc610ce8facda86555caf97b73ec Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 30 Mar 2026 18:07:06 +0800 Subject: [PATCH 039/205] fix(ui): center-align checkbox with skill name in picker modal --- ui/src/components/SkillPickerModal.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ui/src/components/SkillPickerModal.tsx b/ui/src/components/SkillPickerModal.tsx index f0756b8f..38139750 100644 --- a/ui/src/components/SkillPickerModal.tsx +++ b/ui/src/components/SkillPickerModal.tsx @@ -150,12 +150,12 @@ export default function SkillPickerModal({ return (

toggle(skill.path) : undefined}> From 9be0ea001b95df902af2c8aa882ef47985e24261 Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 30 Mar 2026 18:16:06 +0800 Subject: [PATCH 041/205] fix: agent list metadata, detail lookup, card icon, and time format handler_skills list: read agent metadata directly via os.ReadFile instead of install.ReadMeta (which appends subdirectory). Fixes agents showing as 'local' instead of 'github'. handler_skills detail: strip .md from URL param before agent lookup. Fixes 'skill not found: content-marketer.md' error. handler_skills: use time.RFC3339 instead of hardcoded format string. ResourcesPage: use FileText icon for agents, Folder for skills. --- internal/server/handler_skills.go | 42 ++++++++++++++++++------------- ui/src/pages/ResourcesPage.tsx | 5 +++- 2 files changed, 29 insertions(+), 18 deletions(-) diff --git a/internal/server/handler_skills.go b/internal/server/handler_skills.go index 82a07583..9ade98be 100644 --- a/internal/server/handler_skills.go +++ b/internal/server/handler_skills.go @@ -1,6 +1,7 @@ package server import ( + "encoding/json" "fmt" "log" "net/http" @@ -76,7 +77,7 @@ func (s *Server) handleListSkills(w http.ResponseWriter, r *http.Request) { } if meta, _ := install.ReadMeta(d.SourcePath); meta != nil { - item.InstalledAt = meta.InstalledAt.Format("2006-01-02T15:04:05Z") + item.InstalledAt = meta.InstalledAt.Format(time.RFC3339) item.Source = meta.Source item.Type = meta.Type item.RepoURL = meta.RepoURL @@ -107,14 +108,17 @@ func (s *Server) handleListSkills(w http.ResponseWriter, r *http.Request) { SourcePath: agentPath, } - // Check for agent metadata + // Check for agent metadata (agent meta is a standalone file, not inside a dir) metaPath := filepath.Join(agentsSource, agentName+".skillshare-meta.json") - if meta, _ := install.ReadMeta(metaPath); meta != nil { - item.InstalledAt = meta.InstalledAt.Format("2006-01-02T15:04:05Z") - item.Source = meta.Source - item.Type = meta.Type - item.RepoURL = meta.RepoURL - item.Version = meta.Version + if metaData, readErr := os.ReadFile(metaPath); readErr == nil { + var meta install.SkillMeta + if json.Unmarshal(metaData, &meta) == nil { + item.InstalledAt = meta.InstalledAt.Format(time.RFC3339) + item.Source = meta.Source + item.Type = meta.Type + item.RepoURL = meta.RepoURL + item.Version = meta.Version + } } items = append(items, item) @@ -202,24 +206,28 @@ func (s *Server) handleGetSkill(w http.ResponseWriter, r *http.Request) { // Fallback: check agents source if agentsSource != "" { - agentFile := name + ".md" + agentName := strings.TrimSuffix(name, ".md") + agentFile := agentName + ".md" agentPath := filepath.Join(agentsSource, agentFile) if data, err := os.ReadFile(agentPath); err == nil { item := skillItem{ - Name: name, + Name: agentName, Kind: "agent", FlatName: agentFile, RelPath: agentFile, SourcePath: agentPath, } - metaPath := filepath.Join(agentsSource, name+".skillshare-meta.json") - if meta, _ := install.ReadMeta(metaPath); meta != nil { - item.InstalledAt = meta.InstalledAt.Format("2006-01-02T15:04:05Z") - item.Source = meta.Source - item.Type = meta.Type - item.RepoURL = meta.RepoURL - item.Version = meta.Version + metaFilePath := filepath.Join(agentsSource, agentName+".skillshare-meta.json") + if metaData, readErr := os.ReadFile(metaFilePath); readErr == nil { + var meta install.SkillMeta + if json.Unmarshal(metaData, &meta) == nil { + item.InstalledAt = meta.InstalledAt.Format(time.RFC3339) + item.Source = meta.Source + item.Type = meta.Type + item.RepoURL = meta.RepoURL + item.Version = meta.Version + } } writeJSON(w, map[string]any{ diff --git a/ui/src/pages/ResourcesPage.tsx b/ui/src/pages/ResourcesPage.tsx index 3eb5707d..6815ace6 100644 --- a/ui/src/pages/ResourcesPage.tsx +++ b/ui/src/pages/ResourcesPage.tsx @@ -26,6 +26,7 @@ import { X, Bot, Layers, + FileText, } from 'lucide-react'; import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query'; import { VirtuosoGrid, Virtuoso } from 'react-virtuoso'; @@ -605,7 +606,9 @@ const SkillPostit = memo(function SkillPostit({
{skill.isInRepo ? - : + : skill.kind === 'agent' + ? + : }

From 974c3d51b10a0a493e5ac156a0f30ae8bf5dbfab Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 30 Mar 2026 18:23:58 +0800 Subject: [PATCH 042/205] fix(ui): agent detail hides Files, tab persists to localStorage, folder view labels SkillDetailPage: hide Files card when skill.kind === 'agent' (single file, redundant info). ResourcesPage: - Tab selection persists to localStorage, restored on revisit. - Folder view summary: '2 agents in 1 folder' (not 'skills'). - Folder tree: FileText icon for agents, Puzzle for skills. - Grid card: FileText icon for agents. --- ui/src/pages/ResourcesPage.tsx | 22 +++++++++++++++++++--- ui/src/pages/SkillDetailPage.tsx | 4 ++-- 2 files changed, 21 insertions(+), 5 deletions(-) diff --git a/ui/src/pages/ResourcesPage.tsx b/ui/src/pages/ResourcesPage.tsx index 6815ace6..22e4d67c 100644 --- a/ui/src/pages/ResourcesPage.tsx +++ b/ui/src/pages/ResourcesPage.tsx @@ -712,6 +712,19 @@ export default function SkillsPage() { ro.observe(node); return () => ro.disconnect(); }, []); + const [searchParams] = useSearchParams(); + const [activeTab, setActiveTab] = useState(() => { + const urlTab = searchParams.get('tab'); + if (urlTab === 'agents') return 'agents'; + const saved = localStorage.getItem('skillshare:resources-tab'); + return saved === 'agents' ? 'agents' : 'skills'; + }); + const changeTab = (tab: ResourceTab) => { + setActiveTab(tab); + localStorage.setItem('skillshare:resources-tab', tab); + setFilterType('all'); + setSearch(''); + }; const [search, setSearch] = useState(''); const [filterType, setFilterType] = useState('all'); const [sortType, setSortType] = useState('name-asc'); @@ -818,7 +831,7 @@ export default function SkillsPage() { key={tab.key} role="tab" aria-selected={activeTab === tab.key} - onClick={() => { setActiveTab(tab.key); setFilterType('all'); setSearch(''); }} + onClick={() => changeTab(tab.key)} className={` ss-resource-tab inline-flex items-center gap-1.5 px-1 pb-2.5 text-sm font-semibold cursor-pointer @@ -1301,7 +1314,10 @@ function FolderTreeView({ skills, totalCount, isSearching, stickyTop = 0, onClea > {indentGuides} - + {skill.kind === 'agent' + ? + : + } {skill.name} {skill.disabled && disabled} @@ -1346,7 +1362,7 @@ function FolderTreeView({ skills, totalCount, isSearching, stickyTop = 0, onClea )} ) : ( - <>{skills.length} skill{skills.length !== 1 ? 's' : ''} in {folderCount} folder{folderCount !== 1 ? 's' : ''} + <>{activeItems.length} {activeTab === 'agents' ? 'agent' : 'skill'}{activeItems.length !== 1 ? 's' : ''} in {folderCount} folder{folderCount !== 1 ? 's' : ''} )} {folderCount > 1 && ( diff --git a/ui/src/pages/SkillDetailPage.tsx b/ui/src/pages/SkillDetailPage.tsx index c0079ed0..c510d7a7 100644 --- a/ui/src/pages/SkillDetailPage.tsx +++ b/ui/src/pages/SkillDetailPage.tsx @@ -536,7 +536,7 @@ export default function SkillDetailPage() {

- + {skill.kind !== 'agent' &&

@@ -589,7 +589,7 @@ export default function SkillDetailPage() { ) : (

No files.

)} - + } {/* Security Audit */} From e6e41c117bb0b4226e5eebc00685bc65ff4d1fa8 Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 30 Mar 2026 18:27:52 +0800 Subject: [PATCH 043/205] fix(ui): use props instead of parent scope vars in FolderTreeView activeItems/activeTab are not accessible inside the FolderTreeView sub-component. Use the skills prop (already passed) instead. --- ui/src/pages/ResourcesPage.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ui/src/pages/ResourcesPage.tsx b/ui/src/pages/ResourcesPage.tsx index 22e4d67c..784ca98b 100644 --- a/ui/src/pages/ResourcesPage.tsx +++ b/ui/src/pages/ResourcesPage.tsx @@ -1362,7 +1362,7 @@ function FolderTreeView({ skills, totalCount, isSearching, stickyTop = 0, onClea )} ) : ( - <>{activeItems.length} {activeTab === 'agents' ? 'agent' : 'skill'}{activeItems.length !== 1 ? 's' : ''} in {folderCount} folder{folderCount !== 1 ? 's' : ''} + <>{skills.length} item{skills.length !== 1 ? 's' : ''} in {folderCount} folder{folderCount !== 1 ? 's' : ''} )} {folderCount > 1 && ( From ca73b40e0fe93b9b8b394fae65ca47c630bd1e85 Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 30 Mar 2026 23:00:44 +0800 Subject: [PATCH 044/205] feat: warn when agents installed/synced but no targets support agents MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Add Config.HasAgentTarget() helper that checks if any configured target has an agents path set. handler_install: append warning to install summary when agents are installed but no targets support them. handler_sync: after sync completes, check if agents exist in source but no targets have agent paths — add warning to response. --- internal/config/config.go | 17 +++++++++++++++++ internal/server/handler_install.go | 3 +++ internal/server/handler_sync.go | 16 ++++++++++++++++ ui/src/components/InstallForm.tsx | 4 ++-- 4 files changed, 38 insertions(+), 2 deletions(-) diff --git a/internal/config/config.go b/internal/config/config.go index 21c690e0..af302e5d 100644 --- a/internal/config/config.go +++ b/internal/config/config.go @@ -243,6 +243,23 @@ func (c *Config) EffectiveAgentsSource() string { return filepath.Join(BaseDir(), "agents") } +// HasAgentTarget reports whether any configured target has an agents path, +// either from the user's config agents: sub-key or from the built-in defaults. +func (c *Config) HasAgentTarget() bool { + builtinAgents := DefaultAgentTargets() + for name, tc := range c.Targets { + // Check user config agents: sub-key + if ac := tc.AgentsConfig(); ac.Path != "" { + return true + } + // Check built-in defaults + if _, ok := builtinAgents[name]; ok { + return true + } + } + return false +} + // EffectiveGitLabHosts returns GitLabHosts merged with SKILLSHARE_GITLAB_HOSTS env var. // Use this instead of accessing GitLabHosts directly for runtime behavior; // GitLabHosts contains only config-file values and is safe to persist via Save(). diff --git a/internal/server/handler_install.go b/internal/server/handler_install.go index 1b39e2ae..226ec3d6 100644 --- a/internal/server/handler_install.go +++ b/internal/server/handler_install.go @@ -230,6 +230,9 @@ func (s *Server) handleInstallBatch(w http.ResponseWriter, r *http.Request) { if firstErr != "" { summary += " (some errors)" } + if isAgent && installed > 0 && !s.cfg.HasAgentTarget() { + summary += ". Warning: none of your configured targets support agents" + } status := "ok" if installed < len(body.Skills) { diff --git a/internal/server/handler_sync.go b/internal/server/handler_sync.go index 9e6729db..008e755d 100644 --- a/internal/server/handler_sync.go +++ b/internal/server/handler_sync.go @@ -4,6 +4,8 @@ import ( "encoding/json" "maps" "net/http" + "os" + "strings" "time" "skillshare/internal/config" @@ -169,6 +171,20 @@ func (s *Server) handleSync(w http.ResponseWriter, r *http.Request) { "scope": "ui", }, "") + // Check if agents exist in source but no configured target supports them + agentsDir := s.agentsSource() + if agentsDir != "" && !s.cfg.HasAgentTarget() { + if entries, err := os.ReadDir(agentsDir); err == nil { + for _, e := range entries { + if !e.IsDir() && strings.HasSuffix(strings.ToLower(e.Name()), ".md") && + !strings.HasSuffix(strings.ToLower(e.Name()), ".skillshare-meta.json") { + warnings = append(warnings, "Agents exist in source but none of your configured targets support agents. Agent files will not be synced.") + break + } + } + } + } + resp := map[string]any{ "results": results, "warnings": warnings, diff --git a/ui/src/components/InstallForm.tsx b/ui/src/components/InstallForm.tsx index a8c5fb55..91899a4b 100644 --- a/ui/src/components/InstallForm.tsx +++ b/ui/src/components/InstallForm.tsx @@ -578,8 +578,8 @@ export default function InstallForm({

} - confirmLabel="" - cancelLabel="Cancel" + confirmText="" + cancelText="Cancel" onConfirm={() => setShowKindSelector(false)} onCancel={() => setShowKindSelector(false)} /> From cb55ffe55ce9e1104bd02fff5a7242185c2acdd8 Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 6 Apr 2026 10:33:50 +0800 Subject: [PATCH 045/205] feat(agents): add positional kind filter and wire agents into list, sync, update, uninstall, collect, trash MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Phase 1 — Foundation: - parseKindArg() defaults to kindSkills, adds "all" keyword - list: discover agents via resource.AgentKind, populate Kind field in JSON/TUI - sync agents / sync all: new sync_agents.go handles agent sync to targets - uninstall: migrate from --kind flag to positional ("uninstall agents ") Phase 2 — Core CRUD: - update agents: check status, enrich with remote, reinstall from source - uninstall agents: move .md + sidecar to agent trash with confirmation - collect agents: CollectAgents from agent-capable targets to source - trash agents: list/restore/delete/empty for agent trash directory - trash.RestoreAgent(): file-level restore (vs directory-level for skills) Behavior: all commands default to skills-only (no breaking change). "cmd agents" = agents only, "cmd all" = skills + agents. Fix: TestLog_SyncPartialStatus uses dangling symlink instead of chmod (root ignores permissions in devcontainer). 25 new integration tests, all passing. --- .mdproof/lessons-learned.md | 7 + cmd/skillshare/backup.go | 4 +- cmd/skillshare/collect.go | 12 + cmd/skillshare/collect_agents.go | 65 +++++ cmd/skillshare/kind_filter.go | 11 +- cmd/skillshare/kind_filter_test.go | 10 +- cmd/skillshare/list.go | 164 ++++++++---- cmd/skillshare/list_project.go | 132 ++++++---- cmd/skillshare/sync.go | 19 +- cmd/skillshare/sync_agents.go | 139 +++++++++++ cmd/skillshare/trash.go | 70 ++++-- cmd/skillshare/uninstall.go | 34 +-- cmd/skillshare/uninstall_agents.go | 147 +++++++++++ cmd/skillshare/update.go | 10 +- cmd/skillshare/update_agents.go | 274 ++++++++++++++++++++ internal/trash/trash.go | 45 ++++ tests/integration/agent_crud_test.go | 259 +++++++++++++++++++ tests/integration/agent_list_sync_test.go | 290 ++++++++++++++++++++++ tests/integration/install_agent_test.go | 7 +- tests/integration/log_test.go | 18 +- 20 files changed, 1554 insertions(+), 163 deletions(-) create mode 100644 cmd/skillshare/collect_agents.go create mode 100644 cmd/skillshare/sync_agents.go create mode 100644 cmd/skillshare/uninstall_agents.go create mode 100644 cmd/skillshare/update_agents.go create mode 100644 tests/integration/agent_crud_test.go create mode 100644 tests/integration/agent_list_sync_test.go diff --git a/.mdproof/lessons-learned.md b/.mdproof/lessons-learned.md index 0a6e3e9c..9a819d82 100644 --- a/.mdproof/lessons-learned.md +++ b/.mdproof/lessons-learned.md @@ -72,6 +72,13 @@ - **Fix**: Use `>/dev/null 2>&1` (redirect both stdout AND stderr) for cleanup commands in steps that need pure JSON output - **Runbooks affected**: extras_flatten_runbook.md +### [gotcha] chmod 0444 does not block writes when running as root + +- **Context**: `TestLog_SyncPartialStatus` used `os.Chmod(dir, 0444)` to make a target directory read-only, expecting sync to fail on that target and log `"status":"partial"` +- **Discovery**: The devcontainer runs as root. Root ignores POSIX permission bits — `chmod 0444` has no effect. The "broken" target synced successfully, so the oplog recorded `"status":"ok"` instead of `"partial"` +- **Fix**: Use a **dangling symlink** instead: `os.Symlink("/nonexistent/path", targetPath)`. This makes `os.Stat` return "not exist" (passes config validation) but `os.MkdirAll` fails because the symlink entry blocks directory creation. Works regardless of UID +- **Runbooks affected**: `tests/integration/log_test.go` (`TestLog_SyncPartialStatus`) + ### [gotcha] Full-directory mdproof runs cause inter-runbook state leakage - **Context**: Running `mdproof --report json /path/to/tests/` executes all runbooks sequentially in the same environment (same ssenv). Earlier runbooks install skills, modify config, fill trash — this state persists for later runbooks diff --git a/cmd/skillshare/backup.go b/cmd/skillshare/backup.go index ad34eccf..3b6da1e1 100644 --- a/cmd/skillshare/backup.go +++ b/cmd/skillshare/backup.go @@ -460,7 +460,7 @@ func restoreTUIDispatch(noTUI bool) error { if projectConfigExists(cwd) { mode = modeProject } - trashBase := resolveTrashBase(mode, cwd) + trashBase := resolveTrashBase(mode, cwd, kindSkills) items := trash.List(trashBase) if len(items) == 0 { ui.Info("Trash is empty") @@ -471,7 +471,7 @@ func restoreTUIDispatch(noTUI bool) error { modeLabel = "project" } cfgPath := resolveTrashCfgPath(mode, cwd) - destDir, err := resolveSourceDir(mode, cwd) + destDir, err := resolveSourceDir(mode, cwd, kindSkills) if err != nil { return err } diff --git a/cmd/skillshare/collect.go b/cmd/skillshare/collect.go index 82f70c01..76afabcb 100644 --- a/cmd/skillshare/collect.go +++ b/cmd/skillshare/collect.go @@ -82,6 +82,9 @@ func cmdCollect(args []string) error { applyModeLabel(mode) + // Extract kind filter (e.g. "skillshare collect agents"). + kind, rest := parseKindArg(rest) + if mode == modeProject { err := cmdCollectProject(rest, cwd) logCollectOp(config.ProjectConfigPath(cwd), start, err) @@ -111,6 +114,15 @@ func cmdCollect(args []string) error { } } + // Agent-only collect: use CollectAgents from agent-capable targets. + if kind == kindAgents { + cfg, loadErr := config.Load() + if loadErr != nil { + return loadErr + } + return cmdCollectAgents(cfg, dryRun, jsonOutput, start) + } + // --json implies --force (skip confirmation prompts) if jsonOutput { force = true diff --git a/cmd/skillshare/collect_agents.go b/cmd/skillshare/collect_agents.go new file mode 100644 index 00000000..9c6fe1fc --- /dev/null +++ b/cmd/skillshare/collect_agents.go @@ -0,0 +1,65 @@ +package main + +import ( + "fmt" + "os" + "time" + + "skillshare/internal/config" + "skillshare/internal/sync" + "skillshare/internal/ui" +) + +// cmdCollectAgents collects non-symlinked agent .md files from agent-capable targets +// back to the agent source directory. +func cmdCollectAgents(cfg *config.Config, dryRun, jsonOutput bool, start time.Time) error { + agentsSource := cfg.EffectiveAgentsSource() + + if err := os.MkdirAll(agentsSource, 0755); err != nil { + return fmt.Errorf("cannot create agents source directory: %w", err) + } + + builtinAgents := config.DefaultAgentTargets() + var allCollected []string + + if !jsonOutput { + ui.Header(ui.WithModeLabel("Collect agents")) + } + + for name := range cfg.Targets { + agentPath := resolveAgentTargetPath(cfg.Targets[name], builtinAgents, name) + if agentPath == "" { + continue + } + + if _, err := os.Stat(agentPath); err != nil { + continue // target agent dir doesn't exist, skip + } + + collected, err := sync.CollectAgents(agentPath, agentsSource, dryRun, os.Stdout) + if err != nil { + if !jsonOutput { + ui.Warning("%s: collect failed: %v", name, err) + } + continue + } + + if len(collected) > 0 { + allCollected = append(allCollected, collected...) + if !jsonOutput { + ui.Success("%s: collected %d agent(s)", name, len(collected)) + } + } + } + + if !jsonOutput { + if len(allCollected) == 0 { + ui.Info("No local agents found to collect") + } else { + fmt.Println() + ui.Info("Collected %d agent(s) to %s", len(allCollected), agentsSource) + } + } + + return nil +} diff --git a/cmd/skillshare/kind_filter.go b/cmd/skillshare/kind_filter.go index ec44159f..5d490b2b 100644 --- a/cmd/skillshare/kind_filter.go +++ b/cmd/skillshare/kind_filter.go @@ -13,11 +13,12 @@ const ( // parseKindArg extracts a kind filter from the first positional argument. // Returns the filter and remaining args. -// Recognized values: "skills", "skill", "agents", "agent". -// If the first arg is not a kind keyword, returns kindAll with args unchanged. +// Recognized values: "skills", "skill", "agents", "agent", "all". +// If the first arg is not a kind keyword, returns kindSkills with args unchanged +// (default is skills-only; explicit "all" required for both). func parseKindArg(args []string) (resourceKindFilter, []string) { if len(args) == 0 { - return kindAll, args + return kindSkills, args } switch args[0] { @@ -25,8 +26,10 @@ func parseKindArg(args []string) (resourceKindFilter, []string) { return kindSkills, args[1:] case "agents", "agent": return kindAgents, args[1:] + case "all": + return kindAll, args[1:] default: - return kindAll, args + return kindSkills, args } } diff --git a/cmd/skillshare/kind_filter_test.go b/cmd/skillshare/kind_filter_test.go index 3a9fb511..9e4075e8 100644 --- a/cmd/skillshare/kind_filter_test.go +++ b/cmd/skillshare/kind_filter_test.go @@ -8,15 +8,17 @@ func TestParseKindArg(t *testing.T) { wantKind resourceKindFilter wantRest []string }{ - {nil, kindAll, nil}, - {[]string{}, kindAll, []string{}}, + {nil, kindSkills, nil}, + {[]string{}, kindSkills, []string{}}, {[]string{"skills"}, kindSkills, []string{}}, {[]string{"skill"}, kindSkills, []string{}}, {[]string{"agents"}, kindAgents, []string{}}, {[]string{"agent"}, kindAgents, []string{}}, + {[]string{"all"}, kindAll, []string{}}, + {[]string{"all", "--json"}, kindAll, []string{"--json"}}, {[]string{"agents", "tutor"}, kindAgents, []string{"tutor"}}, - {[]string{"--json"}, kindAll, []string{"--json"}}, - {[]string{"my-skill"}, kindAll, []string{"my-skill"}}, + {[]string{"--json"}, kindSkills, []string{"--json"}}, + {[]string{"my-skill"}, kindSkills, []string{"my-skill"}}, } for _, tt := range tests { diff --git a/cmd/skillshare/list.go b/cmd/skillshare/list.go index 828b37a8..20f0fb2f 100644 --- a/cmd/skillshare/list.go +++ b/cmd/skillshare/list.go @@ -12,6 +12,7 @@ import ( "skillshare/internal/config" "skillshare/internal/git" "skillshare/internal/install" + "skillshare/internal/resource" "skillshare/internal/sync" "skillshare/internal/ui" "skillshare/internal/utils" @@ -207,6 +208,7 @@ func buildSkillEntries(discovered []sync.DiscoveredSkill) []skillEntry { for i, d := range discovered { skills[i] = skillEntry{ Name: d.FlatName, + Kind: "skill", IsNested: d.IsInRepo || utils.HasNestedSeparator(d.FlatName), RelPath: d.RelPath, Disabled: d.Disabled, @@ -261,6 +263,43 @@ func buildSkillEntries(discovered []sync.DiscoveredSkill) []skillEntry { return skills } +// discoverAndBuildAgentEntries discovers agents from the given source directory +// and builds skillEntry items with Kind="agent". Reads sidecar metadata for +// installed agents (.skillshare-meta.json). +func discoverAndBuildAgentEntries(agentsSource string) []skillEntry { + if agentsSource == "" { + return nil + } + discovered, err := resource.AgentKind{}.Discover(agentsSource) + if err != nil { + return nil + } + + entries := make([]skillEntry, len(discovered)) + for i, d := range discovered { + entries[i] = skillEntry{ + Name: d.Name, + Kind: "agent", + RelPath: d.RelPath, + IsNested: d.IsNested, + Disabled: d.Disabled, + } + // Read sidecar metadata: .skillshare-meta.json + metaPath := filepath.Join(agentsSource, d.Name+".skillshare-meta.json") + if data, readErr := os.ReadFile(metaPath); readErr == nil { + var meta install.SkillMeta + if jsonErr := json.Unmarshal(data, &meta); jsonErr == nil { + entries[i].Source = meta.Source + entries[i].Type = meta.Type + if !meta.InstalledAt.IsZero() { + entries[i].InstalledAt = meta.InstalledAt.Format("2006-01-02") + } + } + } + } + return entries +} + // extractGroupDir returns the parent directory from a RelPath. // "frontend/react-helper" → "frontend", "my-skill" → "", "_team/frontend/ui" → "_team/frontend" func extractGroupDir(relPath string) string { @@ -512,6 +551,9 @@ func cmdList(args []string) error { applyModeLabel(mode) + // Extract kind filter (e.g. "skillshare list agents"). + kind, rest := parseKindArg(rest) + opts, err := parseListArgs(rest) if opts.ShowHelp { printListHelp() @@ -522,7 +564,7 @@ func cmdList(args []string) error { } if mode == modeProject { - return cmdListProject(cwd, opts) + return cmdListProject(cwd, opts, kind) } cfg, err := config.Load() @@ -533,17 +575,23 @@ func cmdList(args []string) error { // TTY + not JSON + TUI enabled → launch TUI with async loading (no blank screen) if !opts.JSON && shouldLaunchTUI(opts.NoTUI, cfg) { loadFn := func() listLoadResult { - discovered, err := sync.DiscoverSourceSkillsAll(cfg.Source) - if err != nil { - return listLoadResult{err: fmt.Errorf("cannot discover skills: %w", err)} + var allEntries []skillEntry + if kind.IncludesSkills() { + discovered, discErr := sync.DiscoverSourceSkillsAll(cfg.Source) + if discErr != nil { + return listLoadResult{err: fmt.Errorf("cannot discover skills: %w", discErr)} + } + allEntries = append(allEntries, buildSkillEntries(discovered)...) + } + if kind.IncludesAgents() { + allEntries = append(allEntries, discoverAndBuildAgentEntries(cfg.EffectiveAgentsSource())...) } - skills := buildSkillEntries(discovered) - total := len(skills) - skills = filterSkillEntries(skills, opts.Pattern, opts.TypeFilter) + total := len(allEntries) + allEntries = filterSkillEntries(allEntries, opts.Pattern, opts.TypeFilter) if opts.SortBy != "" { - sortSkillEntries(skills, opts.SortBy) + sortSkillEntries(allEntries, opts.SortBy) } - return listLoadResult{skills: toSkillItems(skills), totalCount: total} + return listLoadResult{skills: toSkillItems(allEntries), totalCount: total} } action, skillName, err := runListTUI(loadFn, "global", cfg.Source, cfg.Targets) if err != nil { @@ -551,8 +599,11 @@ func cmdList(args []string) error { } switch action { case "empty": - ui.Info("No skills installed") - ui.Info("Use 'skillshare install ' to install a skill") + resourceLabel := "skills" + if kind == kindAgents { + resourceLabel = "agents" + } + ui.Info("No %s installed", resourceLabel) return nil case "audit": return cmdAudit([]string{"-g", skillName}) @@ -565,82 +616,110 @@ func cmdList(args []string) error { } // Non-TUI path (JSON or plain text): synchronous loading with spinner + resourceLabel := "skills" + if kind == kindAgents { + resourceLabel = "agents" + } else if kind == kindAll { + resourceLabel = "resources" + } + var sp *ui.Spinner if !opts.JSON && ui.IsTTY() { - sp = ui.StartSpinner("Loading skills...") + sp = ui.StartSpinner(fmt.Sprintf("Loading %s...", resourceLabel)) } - discovered, err := sync.DiscoverSourceSkillsAll(cfg.Source) - if err != nil { + + var allEntries []skillEntry + var trackedRepos []string + var discoveredSkills []sync.DiscoveredSkill + + if kind.IncludesSkills() { + var discErr error + discoveredSkills, discErr = sync.DiscoverSourceSkillsAll(cfg.Source) + if discErr != nil { + if sp != nil { + sp.Fail("Discovery failed") + } + return fmt.Errorf("cannot discover skills: %w", discErr) + } + trackedRepos = extractTrackedRepos(discoveredSkills) if sp != nil { - sp.Fail("Discovery failed") + sp.Update(fmt.Sprintf("Reading metadata for %d skills...", len(discoveredSkills))) } - return fmt.Errorf("cannot discover skills: %w", err) + allEntries = append(allEntries, buildSkillEntries(discoveredSkills)...) } - trackedRepos := extractTrackedRepos(discovered) - if sp != nil { - sp.Update(fmt.Sprintf("Reading metadata for %d skills...", len(discovered))) + if kind.IncludesAgents() { + agentEntries := discoverAndBuildAgentEntries(cfg.EffectiveAgentsSource()) + allEntries = append(allEntries, agentEntries...) } - skills := buildSkillEntries(discovered) + if sp != nil { - sp.Success(fmt.Sprintf("Loaded %d skills", len(skills))) + sp.Success(fmt.Sprintf("Loaded %d %s", len(allEntries), resourceLabel)) } - totalCount := len(skills) + totalCount := len(allEntries) hasFilter := opts.Pattern != "" || opts.TypeFilter != "" // Apply filter and sort - skills = filterSkillEntries(skills, opts.Pattern, opts.TypeFilter) + allEntries = filterSkillEntries(allEntries, opts.Pattern, opts.TypeFilter) if opts.SortBy != "" { - sortSkillEntries(skills, opts.SortBy) + sortSkillEntries(allEntries, opts.SortBy) } // JSON output if opts.JSON { - return displaySkillsJSON(skills) + return displaySkillsJSON(allEntries) } - // Handle empty results before starting pager - if len(skills) == 0 && len(trackedRepos) == 0 && !hasFilter { - ui.Info("No skills installed") - ui.Info("Use 'skillshare install ' to install a skill") + // Handle empty results + if len(allEntries) == 0 && len(trackedRepos) == 0 && !hasFilter { + ui.Info("No %s installed", resourceLabel) + if kind.IncludesSkills() { + ui.Info("Use 'skillshare install ' to install a skill") + } return nil } - if hasFilter && len(skills) == 0 { + if hasFilter && len(allEntries) == 0 { if opts.Pattern != "" && opts.TypeFilter != "" { - ui.Info("No skills matching %q (type: %s)", opts.Pattern, opts.TypeFilter) + ui.Info("No %s matching %q (type: %s)", resourceLabel, opts.Pattern, opts.TypeFilter) } else if opts.Pattern != "" { - ui.Info("No skills matching %q", opts.Pattern) + ui.Info("No %s matching %q", resourceLabel, opts.Pattern) } else { - ui.Info("No skills matching type %q", opts.TypeFilter) + ui.Info("No %s matching type %q", resourceLabel, opts.TypeFilter) } return nil } // Plain text output (--no-tui or non-TTY) - if len(skills) > 0 { - ui.Header("Installed skills") + if len(allEntries) > 0 { + headerLabel := "Installed skills" + if kind == kindAgents { + headerLabel = "Installed agents" + } else if kind == kindAll { + headerLabel = "Installed skills & agents" + } + ui.Header(headerLabel) if opts.Verbose { - displaySkillsVerbose(skills) + displaySkillsVerbose(allEntries) } else { - displaySkillsCompact(skills) + displaySkillsCompact(allEntries) } } // Hide tracked repos section when filter/pattern is active if len(trackedRepos) > 0 && !hasFilter { - displayTrackedRepos(trackedRepos, discovered, cfg.Source) + displayTrackedRepos(trackedRepos, discoveredSkills, cfg.Source) } // Show match stats when filter is active - if hasFilter && len(skills) > 0 { + if hasFilter && len(allEntries) > 0 { fmt.Println() if opts.Pattern != "" { - ui.Info("%d of %d skill(s) matching %q", len(skills), totalCount, opts.Pattern) + ui.Info("%d of %d %s matching %q", len(allEntries), totalCount, resourceLabel, opts.Pattern) } else { - ui.Info("%d of %d skill(s)", len(skills), totalCount) + ui.Info("%d of %d %s", len(allEntries), totalCount, resourceLabel) } - } else if !opts.Verbose && len(skills) > 0 { + } else if !opts.Verbose && len(allEntries) > 0 { fmt.Println() ui.Info("Use --verbose for more details") } @@ -678,6 +757,7 @@ func displaySkillsJSON(skills []skillEntry) error { for i, s := range skills { items[i] = skillJSON{ Name: s.Name, + Kind: s.Kind, RelPath: s.RelPath, Source: s.Source, Type: s.Type, diff --git a/cmd/skillshare/list_project.go b/cmd/skillshare/list_project.go index a8f8c08d..fbc4c512 100644 --- a/cmd/skillshare/list_project.go +++ b/cmd/skillshare/list_project.go @@ -9,14 +9,22 @@ import ( "skillshare/internal/ui" ) -func cmdListProject(root string, opts listOptions) error { +func cmdListProject(root string, opts listOptions, kind resourceKindFilter) error { if !projectConfigExists(root) { if err := performProjectInit(root, projectInitOptions{}); err != nil { return err } } - sourcePath := filepath.Join(root, ".skillshare", "skills") + skillsSource := filepath.Join(root, ".skillshare", "skills") + agentsSource := filepath.Join(root, ".skillshare", "agents") + + resourceLabel := "skills" + if kind == kindAgents { + resourceLabel = "agents" + } else if kind == kindAll { + resourceLabel = "resources" + } // TTY + not JSON + TUI enabled → launch TUI with async loading (no blank screen) if !opts.JSON && shouldLaunchTUI(opts.NoTUI, nil) { @@ -30,24 +38,32 @@ func cmdListProject(root string, opts listOptions) error { sortBy = "name" } loadFn := func() listLoadResult { - discovered, err := sync.DiscoverSourceSkillsAll(sourcePath) - if err != nil { - return listLoadResult{err: fmt.Errorf("cannot discover project skills: %w", err)} + var allEntries []skillEntry + if kind.IncludesSkills() { + discovered, err := sync.DiscoverSourceSkillsAll(skillsSource) + if err != nil { + return listLoadResult{err: fmt.Errorf("cannot discover project skills: %w", err)} + } + allEntries = append(allEntries, buildSkillEntries(discovered)...) } - skills := buildSkillEntries(discovered) - total := len(skills) - skills = filterSkillEntries(skills, opts.Pattern, opts.TypeFilter) - sortSkillEntries(skills, sortBy) - return listLoadResult{skills: toSkillItems(skills), totalCount: total} + if kind.IncludesAgents() { + allEntries = append(allEntries, discoverAndBuildAgentEntries(agentsSource)...) + } + total := len(allEntries) + allEntries = filterSkillEntries(allEntries, opts.Pattern, opts.TypeFilter) + sortSkillEntries(allEntries, sortBy) + return listLoadResult{skills: toSkillItems(allEntries), totalCount: total} } - action, skillName, err := runListTUI(loadFn, "project", sourcePath, targets) + action, skillName, err := runListTUI(loadFn, "project", skillsSource, targets) if err != nil { return err } switch action { case "empty": - ui.Info("No skills installed") - ui.Info("Use 'skillshare install -p ' to install a skill") + ui.Info("No %s installed", resourceLabel) + if kind.IncludesSkills() { + ui.Info("Use 'skillshare install -p ' to install a skill") + } return nil case "audit": return cmdAudit([]string{"-p", skillName}) @@ -63,96 +79,114 @@ func cmdListProject(root string, opts listOptions) error { // Non-TUI path (JSON or plain text): synchronous loading with spinner var sp *ui.Spinner if !opts.JSON && ui.IsTTY() { - sp = ui.StartSpinner("Loading skills...") + sp = ui.StartSpinner(fmt.Sprintf("Loading %s...", resourceLabel)) } - // Use DiscoverSourceSkillsAll so disabled skills appear in the listing - discovered, err := sync.DiscoverSourceSkillsAll(sourcePath) - if err != nil { + var allEntries []skillEntry + var trackedRepos []string + var discoveredSkills []sync.DiscoveredSkill + + if kind.IncludesSkills() { + var discErr error + discoveredSkills, discErr = sync.DiscoverSourceSkillsAll(skillsSource) + if discErr != nil { + if sp != nil { + sp.Fail("Discovery failed") + } + return fmt.Errorf("cannot discover project skills: %w", discErr) + } + trackedRepos = extractTrackedRepos(discoveredSkills) if sp != nil { - sp.Fail("Discovery failed") + sp.Update(fmt.Sprintf("Reading metadata for %d skills...", len(discoveredSkills))) } - return fmt.Errorf("cannot discover project skills: %w", err) + allEntries = append(allEntries, buildSkillEntries(discoveredSkills)...) } - trackedRepos := extractTrackedRepos(discovered) - if sp != nil { - sp.Update(fmt.Sprintf("Reading metadata for %d skills...", len(discovered))) + if kind.IncludesAgents() { + allEntries = append(allEntries, discoverAndBuildAgentEntries(agentsSource)...) } - skills := buildSkillEntries(discovered) + if sp != nil { - sp.Success(fmt.Sprintf("Loaded %d skills", len(skills))) + sp.Success(fmt.Sprintf("Loaded %d %s", len(allEntries), resourceLabel)) } - totalCount := len(skills) + totalCount := len(allEntries) hasFilter := opts.Pattern != "" || opts.TypeFilter != "" // Apply filter and sort - skills = filterSkillEntries(skills, opts.Pattern, opts.TypeFilter) + allEntries = filterSkillEntries(allEntries, opts.Pattern, opts.TypeFilter) sortBy := opts.SortBy if sortBy == "" { sortBy = "name" // project mode default } - sortSkillEntries(skills, sortBy) + sortSkillEntries(allEntries, sortBy) // JSON output if opts.JSON { - return displaySkillsJSON(skills) + return displaySkillsJSON(allEntries) } - // Handle empty results before starting pager - if len(skills) == 0 && len(trackedRepos) == 0 && !hasFilter { - ui.Info("No skills installed") - ui.Info("Use 'skillshare install -p ' to install a skill") + // Handle empty results + if len(allEntries) == 0 && len(trackedRepos) == 0 && !hasFilter { + ui.Info("No %s installed", resourceLabel) + if kind.IncludesSkills() { + ui.Info("Use 'skillshare install -p ' to install a skill") + } return nil } - if hasFilter && len(skills) == 0 { + if hasFilter && len(allEntries) == 0 { if opts.Pattern != "" && opts.TypeFilter != "" { - ui.Info("No skills matching %q (type: %s)", opts.Pattern, opts.TypeFilter) + ui.Info("No %s matching %q (type: %s)", resourceLabel, opts.Pattern, opts.TypeFilter) } else if opts.Pattern != "" { - ui.Info("No skills matching %q", opts.Pattern) + ui.Info("No %s matching %q", resourceLabel, opts.Pattern) } else { - ui.Info("No skills matching type %q", opts.TypeFilter) + ui.Info("No %s matching type %q", resourceLabel, opts.TypeFilter) } return nil } // Plain text output (--no-tui or non-TTY) - if len(skills) > 0 { - ui.Header("Installed skills (project)") + if len(allEntries) > 0 { + headerLabel := "Installed skills (project)" + if kind == kindAgents { + headerLabel = "Installed agents (project)" + } else if kind == kindAll { + headerLabel = "Installed skills & agents (project)" + } + ui.Header(headerLabel) if opts.Verbose { - displaySkillsVerbose(skills) + displaySkillsVerbose(allEntries) } else { - displaySkillsCompact(skills) + displaySkillsCompact(allEntries) } } // Hide tracked repos section when filter/pattern is active if len(trackedRepos) > 0 && !hasFilter { - displayTrackedRepos(trackedRepos, discovered, sourcePath) + displayTrackedRepos(trackedRepos, discoveredSkills, skillsSource) } // Show match stats when filter is active - if hasFilter && len(skills) > 0 { + if hasFilter && len(allEntries) > 0 { fmt.Println() if opts.Pattern != "" { - ui.Info("%d of %d skill(s) matching %q", len(skills), totalCount, opts.Pattern) + ui.Info("%d of %d %s matching %q", len(allEntries), totalCount, resourceLabel, opts.Pattern) } else { - ui.Info("%d of %d skill(s)", len(skills), totalCount) + ui.Info("%d of %d %s", len(allEntries), totalCount, resourceLabel) } } else { fmt.Println() trackedCount := 0 remoteCount := 0 - for _, skill := range skills { - if skill.RepoName != "" { + for _, entry := range allEntries { + if entry.RepoName != "" { trackedCount++ - } else if skill.Source != "" { + } else if entry.Source != "" { remoteCount++ } } - localCount := len(skills) - trackedCount - remoteCount - ui.Info("%d skill(s): %d tracked, %d remote, %d local", len(skills), trackedCount, remoteCount, localCount) + localCount := len(allEntries) - trackedCount - remoteCount + ui.Info("%d %s: %d tracked, %d remote, %d local", len(allEntries), resourceLabel, trackedCount, remoteCount, localCount) } return nil diff --git a/cmd/skillshare/sync.go b/cmd/skillshare/sync.go index 354d9a0b..dd6762f3 100644 --- a/cmd/skillshare/sync.go +++ b/cmd/skillshare/sync.go @@ -100,6 +100,9 @@ func cmdSync(args []string) error { applyModeLabel(mode) + // Extract kind filter (e.g. "skillshare sync agents"). + kind, rest := parseKindArg(rest) + dryRun, force, jsonOutput := parseSyncFlags(rest) prevDiagOutput := sync.DiagOutput @@ -160,7 +163,14 @@ func cmdSync(args []string) error { } } - // Phase 1: Discovery + // Agent-only mode: skip skill discovery/sync entirely + if kind == kindAgents { + _, agentErr := syncAgentsGlobal(cfg, dryRun, force, jsonOutput, start) + logSyncOp(config.ConfigPath(), syncLogStats{DryRun: dryRun, Force: force}, start, agentErr) + return agentErr + } + + // Phase 1: Discovery (skills) var spinner *ui.Spinner if !jsonOutput { spinner = ui.StartSpinner("Discovering skills") @@ -261,6 +271,13 @@ func cmdSync(args []string) error { return syncOutputJSON(results, dryRun, start, ignoreStats, syncErr) } + // Agent sync when kind=all (after skill sync) + if kind == kindAll { + if _, agentErr := syncAgentsGlobal(cfg, dryRun, force, jsonOutput, start); agentErr != nil && syncErr == nil { + syncErr = agentErr + } + } + if hasAll { fmt.Println() if extrasErr := cmdSyncExtras(rest); extrasErr != nil { diff --git a/cmd/skillshare/sync_agents.go b/cmd/skillshare/sync_agents.go new file mode 100644 index 00000000..a5efddd4 --- /dev/null +++ b/cmd/skillshare/sync_agents.go @@ -0,0 +1,139 @@ +package main + +import ( + "fmt" + "os" + "time" + + "skillshare/internal/config" + "skillshare/internal/resource" + "skillshare/internal/sync" + "skillshare/internal/ui" +) + +// agentSyncStats aggregates per-target agent sync results. +type agentSyncStats struct { + linked, skipped, updated, pruned int +} + +// syncAgentsGlobal discovers agents and syncs them to all agent-capable targets. +// Returns total stats and any error. +func syncAgentsGlobal(cfg *config.Config, dryRun, force, jsonOutput bool, start time.Time) (agentSyncStats, error) { + agentsSource := cfg.EffectiveAgentsSource() + + // Check agent source exists + if _, err := os.Stat(agentsSource); err != nil { + if os.IsNotExist(err) { + if !jsonOutput { + ui.Info("No agents source directory (%s)", agentsSource) + } + return agentSyncStats{}, nil + } + return agentSyncStats{}, fmt.Errorf("cannot access agents source: %w", err) + } + + // Discover agents + agents, err := resource.AgentKind{}.Discover(agentsSource) + if err != nil { + return agentSyncStats{}, fmt.Errorf("cannot discover agents: %w", err) + } + + if len(agents) == 0 { + if !jsonOutput { + ui.Info("No agents found in %s", agentsSource) + } + return agentSyncStats{}, nil + } + + if !jsonOutput { + ui.Header("Syncing agents") + if dryRun { + ui.Warning("Dry run mode - no changes will be made") + } + } + + // Resolve agent-capable targets: user config agents sub-key + built-in defaults + builtinAgents := config.DefaultAgentTargets() + var totals agentSyncStats + var syncErr error + + for name := range cfg.Targets { + agentPath := resolveAgentTargetPath(cfg.Targets[name], builtinAgents, name) + if agentPath == "" { + continue // target has no agent path + } + + tc := cfg.Targets[name] + ac := tc.AgentsConfig() + mode := ac.Mode + if mode == "" { + mode = "merge" // default agent sync mode + } + + result, err := sync.SyncAgents(agents, agentsSource, agentPath, mode, dryRun, force) + if err != nil { + if !jsonOutput { + ui.Error("%s: agent sync failed: %v", name, err) + } + syncErr = fmt.Errorf("some agent targets failed to sync") + continue + } + + // Prune orphan agent links/copies + var pruned []string + switch mode { + case "copy": + pruned, _ = sync.PruneOrphanAgentCopies(agentPath, agents, dryRun) + case "merge": + pruned, _ = sync.PruneOrphanAgentLinks(agentPath, agents, dryRun) + } + + stats := agentSyncStats{ + linked: len(result.Linked), + skipped: len(result.Skipped), + updated: len(result.Updated), + pruned: len(pruned), + } + totals.linked += stats.linked + totals.skipped += stats.skipped + totals.updated += stats.updated + totals.pruned += stats.pruned + + if !jsonOutput { + reportAgentSyncResult(name, mode, stats, dryRun) + } + } + + if !jsonOutput { + fmt.Println() + ui.Info("Agent sync: %d linked, %d local, %d updated, %d pruned (%s)", + totals.linked, totals.skipped, totals.updated, totals.pruned, + formatDuration(start)) + } + + return totals, syncErr +} + +// resolveAgentTargetPath returns the effective agent path for a target, +// checking user config first, then built-in defaults. Returns "" if none. +func resolveAgentTargetPath(tc config.TargetConfig, builtinAgents map[string]config.TargetConfig, name string) string { + if ac := tc.AgentsConfig(); ac.Path != "" { + return config.ExpandPath(ac.Path) + } + if builtin, ok := builtinAgents[name]; ok { + return config.ExpandPath(builtin.Path) + } + return "" +} + +// reportAgentSyncResult prints per-target agent sync status. +func reportAgentSyncResult(name, mode string, stats agentSyncStats, dryRun bool) { + if stats.linked > 0 || stats.updated > 0 || stats.pruned > 0 { + ui.Success("%s: agents %s (%d linked, %d local, %d updated, %d pruned)", + name, mode, stats.linked, stats.skipped, stats.updated, stats.pruned) + } else if stats.skipped > 0 { + ui.Success("%s: agents %s (%d local preserved)", name, mode, stats.skipped) + } else { + ui.Success("%s: agents %s (up to date)", name, mode) + } +} diff --git a/cmd/skillshare/trash.go b/cmd/skillshare/trash.go index f7ef5e1c..ccdab4e6 100644 --- a/cmd/skillshare/trash.go +++ b/cmd/skillshare/trash.go @@ -33,6 +33,9 @@ func cmdTrash(args []string) error { applyModeLabel(mode) + // Extract kind filter (e.g. "skillshare trash agents list"). + kind, rest := parseKindArg(rest) + if len(rest) == 0 { printTrashHelp() return nil @@ -54,13 +57,13 @@ func cmdTrash(args []string) error { switch sub { case "list", "ls": - return trashList(mode, cwd, noTUI) + return trashList(mode, cwd, noTUI, kind) case "restore": - return trashRestore(mode, cwd, filteredArgs) + return trashRestore(mode, cwd, filteredArgs, kind) case "delete", "rm": - return trashDelete(mode, cwd, filteredArgs) + return trashDelete(mode, cwd, filteredArgs, kind) case "empty": - return trashEmpty(mode, cwd) + return trashEmpty(mode, cwd, kind) case "--help", "-h", "help": printTrashHelp() return nil @@ -70,8 +73,8 @@ func cmdTrash(args []string) error { } } -func trashList(mode runMode, cwd string, noTUI bool) error { - trashBase := resolveTrashBase(mode, cwd) +func trashList(mode runMode, cwd string, noTUI bool, kind resourceKindFilter) error { + trashBase := resolveTrashBase(mode, cwd, kind) items := trash.List(trashBase) if len(items) == 0 { @@ -86,7 +89,7 @@ func trashList(mode runMode, cwd string, noTUI bool) error { modeLabel = "project" } cfgPath := resolveTrashCfgPath(mode, cwd) - destDir, err := resolveSourceDir(mode, cwd) + destDir, err := resolveSourceDir(mode, cwd, kind) if err != nil { return err } @@ -110,7 +113,7 @@ func trashList(mode runMode, cwd string, noTUI bool) error { return nil } -func trashRestore(mode runMode, cwd string, args []string) error { +func trashRestore(mode runMode, cwd string, args []string, kind resourceKindFilter) error { start := time.Now() var name string @@ -136,7 +139,7 @@ func trashRestore(mode runMode, cwd string, args []string) error { cfgPath := resolveTrashCfgPath(mode, cwd) - trashBase := resolveTrashBase(mode, cwd) + trashBase := resolveTrashBase(mode, cwd, kind) entry := trash.FindByName(trashBase, name) if entry == nil { cmdErr := fmt.Errorf("'%s' not found in trash", name) @@ -144,28 +147,39 @@ func trashRestore(mode runMode, cwd string, args []string) error { return cmdErr } - destDir, err := resolveSourceDir(mode, cwd) + destDir, err := resolveSourceDir(mode, cwd, kind) if err != nil { logTrashOp(cfgPath, "restore", 0, name, start, err) return err } - if err := trash.Restore(entry, destDir); err != nil { - logTrashOp(cfgPath, "restore", 0, name, start, err) - return err + if kind == kindAgents { + if err := trash.RestoreAgent(entry, destDir); err != nil { + logTrashOp(cfgPath, "restore", 0, name, start, err) + return err + } + } else { + if err := trash.Restore(entry, destDir); err != nil { + logTrashOp(cfgPath, "restore", 0, name, start, err) + return err + } } ui.Success("Restored: %s", name) age := time.Since(entry.Date) ui.Info("Trashed %s ago, now back in %s", formatAge(age), destDir) ui.SectionLabel("Next Steps") - ui.Info("Run 'skillshare sync' to update targets") + syncHint := "skillshare sync" + if kind == kindAgents { + syncHint = "skillshare sync agents" + } + ui.Info("Run '%s' to update targets", syncHint) logTrashOp(cfgPath, "restore", 1, name, start, nil) return nil } -func trashDelete(mode runMode, cwd string, args []string) error { +func trashDelete(mode runMode, cwd string, args []string, kind resourceKindFilter) error { var name string for _, arg := range args { switch { @@ -187,7 +201,7 @@ func trashDelete(mode runMode, cwd string, args []string) error { return fmt.Errorf("skill name is required") } - trashBase := resolveTrashBase(mode, cwd) + trashBase := resolveTrashBase(mode, cwd, kind) entry := trash.FindByName(trashBase, name) if entry == nil { return fmt.Errorf("'%s' not found in trash", name) @@ -201,11 +215,11 @@ func trashDelete(mode runMode, cwd string, args []string) error { return nil } -func trashEmpty(mode runMode, cwd string) error { +func trashEmpty(mode runMode, cwd string, kind resourceKindFilter) error { start := time.Now() cfgPath := resolveTrashCfgPath(mode, cwd) - trashBase := resolveTrashBase(mode, cwd) + trashBase := resolveTrashBase(mode, cwd, kind) items := trash.List(trashBase) if len(items) == 0 { @@ -238,14 +252,30 @@ func trashEmpty(mode runMode, cwd string) error { return nil } -func resolveTrashBase(mode runMode, cwd string) string { +func resolveTrashBase(mode runMode, cwd string, kind resourceKindFilter) string { + if kind == kindAgents { + if mode == modeProject { + return trash.ProjectAgentTrashDir(cwd) + } + return trash.AgentTrashDir() + } if mode == modeProject { return trash.ProjectTrashDir(cwd) } return trash.TrashDir() } -func resolveSourceDir(mode runMode, cwd string) (string, error) { +func resolveSourceDir(mode runMode, cwd string, kind resourceKindFilter) (string, error) { + if kind == kindAgents { + if mode == modeProject { + return fmt.Sprintf("%s/.skillshare/agents", cwd), nil + } + cfg, err := config.Load() + if err != nil { + return "", fmt.Errorf("failed to load config: %w", err) + } + return cfg.EffectiveAgentsSource(), nil + } if mode == modeProject { return fmt.Sprintf("%s/.skillshare/skills", cwd), nil } diff --git a/cmd/skillshare/uninstall.go b/cmd/skillshare/uninstall.go index 2127c433..a61da93b 100644 --- a/cmd/skillshare/uninstall.go +++ b/cmd/skillshare/uninstall.go @@ -25,7 +25,7 @@ import ( type uninstallOptions struct { skillNames []string // positional args (0+) groups []string // --group/-G values (repeatable) - kind resourceKindFilter // --kind skill|agent + kind resourceKindFilter // set by positional filter (e.g. "uninstall agents") all bool // --all: remove ALL skills from source force bool dryRun bool @@ -70,19 +70,6 @@ func parseUninstallArgs(args []string) (*uninstallOptions, bool, error) { opts.dryRun = true case arg == "--json": opts.jsonOutput = true - case arg == "--kind": - i++ - if i >= len(args) { - return nil, false, fmt.Errorf("--kind requires a value (skill or agent)") - } - switch strings.ToLower(args[i]) { - case "skill", "skills": - opts.kind = kindSkills - case "agent", "agents": - opts.kind = kindAgents - default: - return nil, false, fmt.Errorf("--kind must be 'skill' or 'agent', got %q", args[i]) - } case arg == "--group" || arg == "-G": i++ if i >= len(args) { @@ -563,12 +550,8 @@ func cmdUninstall(args []string) error { applyModeLabel(mode) - // Extract --kind flag before parsing other args. - kind, rest, err := parseKindFlag(rest) - if err != nil { - return err - } - _ = kind // TODO: wire agent-specific uninstall path + // Extract kind filter (e.g. "skillshare uninstall agents myagent"). + kind, rest := parseKindArg(rest) if mode == modeProject { err := cmdUninstallProject(rest, cwd) @@ -598,16 +581,11 @@ func cmdUninstall(args []string) error { return fmt.Errorf("failed to load config: %w", err) } - // When --kind agent is set, resolve targets from the agents source directory. + // Agent-only uninstall: move .md + sidecar to agent trash, then return. if kind == kindAgents { agentsDir := cfg.EffectiveAgentsSource() - for i, name := range opts.skillNames { - agentPath := filepath.Join(agentsDir, name+".md") - if _, statErr := os.Stat(agentPath); statErr == nil { - opts.skillNames[i] = agentPath - } - } - // TODO: implement full agent uninstall (remove .md + .skillshare-meta.json, update registry) + err := cmdUninstallAgents(agentsDir, opts, config.ConfigPath(), start) + return err } // --- Phase 1: RESOLVE --- diff --git a/cmd/skillshare/uninstall_agents.go b/cmd/skillshare/uninstall_agents.go new file mode 100644 index 00000000..022a0f62 --- /dev/null +++ b/cmd/skillshare/uninstall_agents.go @@ -0,0 +1,147 @@ +package main + +import ( + "fmt" + "os" + "path/filepath" + "strings" + "time" + + "skillshare/internal/oplog" + "skillshare/internal/resource" + "skillshare/internal/trash" + "skillshare/internal/ui" +) + +// cmdUninstallAgents removes agents from the source directory by moving them to agent trash. +func cmdUninstallAgents(agentsDir string, opts *uninstallOptions, cfgPath string, start time.Time) error { + if _, err := os.Stat(agentsDir); err != nil { + if os.IsNotExist(err) { + return fmt.Errorf("agents source directory does not exist: %s", agentsDir) + } + return fmt.Errorf("cannot access agents source: %w", err) + } + + // Resolve agent names + var names []string + if opts.all { + discovered, err := resource.AgentKind{}.Discover(agentsDir) + if err != nil { + return fmt.Errorf("failed to discover agents: %w", err) + } + for _, d := range discovered { + names = append(names, d.Name) + } + if len(names) == 0 { + ui.Info("No agents found") + return nil + } + } else { + names = opts.skillNames + } + + if len(names) == 0 { + return fmt.Errorf("specify agent name(s) or --all") + } + + // Validate all agents exist before removing any + for _, name := range names { + agentFile := filepath.Join(agentsDir, name+".md") + if _, err := os.Stat(agentFile); err != nil { + return fmt.Errorf("agent %q not found in %s", name, agentsDir) + } + } + + // Confirmation (unless --force or --json) + if !opts.force && !opts.jsonOutput { + ui.Warning("This will remove %d agent(s): %s", len(names), strings.Join(names, ", ")) + fmt.Print("Continue? [y/N] ") + var input string + fmt.Scanln(&input) + input = strings.TrimSpace(strings.ToLower(input)) + if input != "y" && input != "yes" { + ui.Info("Cancelled") + return nil + } + } + + trashBase := trash.AgentTrashDir() + var removed []string + var failed []string + + for _, name := range names { + agentFile := filepath.Join(agentsDir, name+".md") + metaFile := filepath.Join(agentsDir, name+".skillshare-meta.json") + + if opts.dryRun { + ui.Info("[dry-run] Would remove agent: %s", name) + removed = append(removed, name) + continue + } + + _, err := trash.MoveAgentToTrash(agentFile, metaFile, name, trashBase) + if err != nil { + ui.Error("Failed to remove %s: %v", name, err) + failed = append(failed, name) + continue + } + + ui.Success("Removed agent: %s", name) + removed = append(removed, name) + } + + // JSON output + if opts.jsonOutput { + output := struct { + Removed []string `json:"removed"` + Failed []string `json:"failed"` + DryRun bool `json:"dry_run"` + Duration string `json:"duration"` + }{ + Removed: removed, + Failed: failed, + DryRun: opts.dryRun, + Duration: formatDuration(start), + } + var jsonErr error + if len(failed) > 0 { + jsonErr = fmt.Errorf("%d agent(s) failed to uninstall", len(failed)) + } + return writeJSONResult(&output, jsonErr) + } + + // Summary + if !opts.dryRun { + fmt.Println() + ui.Info("%d agent(s) removed, %d failed", len(removed), len(failed)) + if len(removed) > 0 { + ui.Info("Run 'skillshare sync agents' to update targets") + } + } + + // Oplog + logUninstallAgentOp(cfgPath, names, len(removed), len(failed), opts.dryRun, start) + + if len(failed) > 0 { + return fmt.Errorf("%d agent(s) failed to uninstall", len(failed)) + } + return nil +} + +func logUninstallAgentOp(cfgPath string, names []string, removed, failed int, dryRun bool, start time.Time) { + status := "ok" + if failed > 0 && removed > 0 { + status = "partial" + } else if failed > 0 { + status = "error" + } + e := oplog.NewEntry("uninstall", status, time.Since(start)) + e.Args = map[string]any{ + "resource_kind": "agent", + "names": names, + "removed": removed, + "failed": failed, + "dry_run": dryRun, + } + oplog.WriteWithLimit(cfgPath, oplog.OpsFile, e, logMaxEntries()) //nolint:errcheck +} diff --git a/cmd/skillshare/update.go b/cmd/skillshare/update.go index 84ba0b2f..3617e516 100644 --- a/cmd/skillshare/update.go +++ b/cmd/skillshare/update.go @@ -132,7 +132,15 @@ func cmdUpdate(args []string) error { // Extract kind filter (e.g. "skillshare update agents") kind, rest := parseKindArg(rest) - _ = kind // TODO: wire agent-only update path + + // Agent-only update: skip skill update entirely + if kind == kindAgents { + cfg, loadErr := config.Load() + if loadErr != nil { + return loadErr + } + return cmdUpdateAgents(rest, cfg, start) + } if mode == modeProject { // Parse opts for logging (cmdUpdateProject parses again internally) diff --git a/cmd/skillshare/update_agents.go b/cmd/skillshare/update_agents.go new file mode 100644 index 00000000..61cb3236 --- /dev/null +++ b/cmd/skillshare/update_agents.go @@ -0,0 +1,274 @@ +package main + +import ( + "encoding/json" + "fmt" + "os" + "path/filepath" + "strings" + "time" + + "skillshare/internal/check" + "skillshare/internal/config" + "skillshare/internal/install" + "skillshare/internal/oplog" + "skillshare/internal/ui" +) + +// cmdUpdateAgents handles "skillshare update agents [name|--all]". +func cmdUpdateAgents(args []string, cfg *config.Config, start time.Time) error { + opts, showHelp, parseErr := parseUpdateAgentArgs(args) + if showHelp { + printUpdateHelp() + return nil + } + if parseErr != nil { + return parseErr + } + + agentsDir := cfg.EffectiveAgentsSource() + if _, err := os.Stat(agentsDir); err != nil { + if os.IsNotExist(err) { + ui.Info("No agents source directory (%s)", agentsDir) + return nil + } + return fmt.Errorf("cannot access agents source: %w", err) + } + + // Discover agents and check status + results := check.CheckAgents(agentsDir) + if len(results) == 0 { + ui.Info("No agents found") + return nil + } + + // Filter by name if specified + if len(opts.names) > 0 { + results = filterAgentCheckResults(results, opts.names) + if len(results) == 0 { + return fmt.Errorf("no matching agents found: %s", strings.Join(opts.names, ", ")) + } + } + + // Only check agents that have remote sources + var tracked []check.AgentCheckResult + for _, r := range results { + if r.Source != "" { + tracked = append(tracked, r) + } + } + + if len(tracked) == 0 { + ui.Info("No tracked agents to update (all are local)") + return nil + } + + // Enrich with remote status + if !opts.jsonOutput { + sp := ui.StartSpinner(fmt.Sprintf("Checking %d agent(s) for updates...", len(tracked))) + check.EnrichAgentResultsWithRemote(tracked, func() { sp.Success("Check complete") }) + } else { + check.EnrichAgentResultsWithRemote(tracked, nil) + } + + // Find agents with updates available + var updatable []check.AgentCheckResult + for _, r := range tracked { + if r.Status == "update_available" { + updatable = append(updatable, r) + } + } + + if len(updatable) == 0 { + if !opts.jsonOutput { + ui.Success("All agents are up to date") + } + if opts.jsonOutput { + return updateAgentsOutputJSON(nil, opts.dryRun, start, nil) + } + return nil + } + + if !opts.jsonOutput { + ui.Header("Updating agents") + if opts.dryRun { + ui.Warning("Dry run mode - no changes will be made") + } + } + + // Update each agent by re-installing from its source + var updated, failed int + for _, r := range updatable { + if opts.dryRun { + if !opts.jsonOutput { + ui.Info(" %s: update available from %s", r.Name, r.Source) + } + continue + } + + err := reinstallAgent(agentsDir, r) + if err != nil { + if !opts.jsonOutput { + ui.Error(" %s: update failed: %v", r.Name, err) + } + failed++ + } else { + if !opts.jsonOutput { + ui.Success(" %s: updated", r.Name) + } + updated++ + } + } + + if !opts.jsonOutput && !opts.dryRun { + fmt.Println() + ui.Info("Agent update: %d updated, %d failed", updated, failed) + } + + logUpdateAgentOp(config.ConfigPath(), len(updatable), updated, failed, opts.dryRun, start) + + if opts.jsonOutput { + return updateAgentsOutputJSON(updatable, opts.dryRun, start, nil) + } + + if failed > 0 { + return fmt.Errorf("%d agent(s) failed to update", failed) + } + return nil +} + +// reinstallAgent re-installs an agent from its recorded source. +func reinstallAgent(agentsDir string, r check.AgentCheckResult) error { + metaFile := filepath.Join(agentsDir, r.Name+".skillshare-meta.json") + + // Read current metadata + metaData, err := os.ReadFile(metaFile) + if err != nil { + return fmt.Errorf("cannot read metadata: %w", err) + } + var meta install.SkillMeta + if err := json.Unmarshal(metaData, &meta); err != nil { + return fmt.Errorf("invalid metadata: %w", err) + } + + if meta.Source == "" { + return fmt.Errorf("no source in metadata") + } + + // Parse and re-install from source + source, parseErr := install.ParseSource(meta.Source) + if parseErr != nil { + return fmt.Errorf("invalid source: %w", parseErr) + } + if meta.Branch != "" { + source.Branch = meta.Branch + } + + installOpts := install.InstallOptions{ + Kind: "agent", + AgentNames: []string{r.Name}, + Force: true, + Update: true, + } + + _, installErr := install.Install(source, agentsDir, installOpts) + return installErr +} + +// updateAgentArgs holds parsed arguments for agent update. +type updateAgentArgs struct { + names []string + all bool + dryRun bool + jsonOutput bool +} + +func parseUpdateAgentArgs(args []string) (*updateAgentArgs, bool, error) { + opts := &updateAgentArgs{} + for i := 0; i < len(args); i++ { + arg := args[i] + switch { + case arg == "--all": + opts.all = true + case arg == "--dry-run" || arg == "-n": + opts.dryRun = true + case arg == "--json": + opts.jsonOutput = true + case arg == "--group" || arg == "-G": + return nil, false, fmt.Errorf("--group is not supported for agents") + case arg == "--help" || arg == "-h": + return nil, true, nil + case strings.HasPrefix(arg, "-"): + return nil, false, fmt.Errorf("unknown option: %s", arg) + default: + opts.names = append(opts.names, arg) + } + } + + if !opts.all && len(opts.names) == 0 { + return nil, false, fmt.Errorf("specify agent name(s) or --all") + } + if opts.all && len(opts.names) > 0 { + return nil, false, fmt.Errorf("--all cannot be used with agent names") + } + + return opts, false, nil +} + +func filterAgentCheckResults(results []check.AgentCheckResult, names []string) []check.AgentCheckResult { + nameSet := make(map[string]bool, len(names)) + for _, n := range names { + nameSet[n] = true + } + var filtered []check.AgentCheckResult + for _, r := range results { + if nameSet[r.Name] { + filtered = append(filtered, r) + } + } + return filtered +} + +func logUpdateAgentOp(cfgPath string, total, updated, failed int, dryRun bool, start time.Time) { + status := "ok" + if failed > 0 && updated > 0 { + status = "partial" + } else if failed > 0 { + status = "error" + } + e := oplog.NewEntry("update", status, time.Since(start)) + e.Args = map[string]any{ + "resource_kind": "agent", + "agents_total": total, + "agents_updated": updated, + "agents_failed": failed, + "dry_run": dryRun, + } + oplog.WriteWithLimit(cfgPath, oplog.OpsFile, e, logMaxEntries()) //nolint:errcheck +} + +func updateAgentsOutputJSON(updatable []check.AgentCheckResult, dryRun bool, start time.Time, err error) error { + type agentUpdateJSON struct { + Name string `json:"name"` + Source string `json:"source,omitempty"` + Status string `json:"status"` + } + var items []agentUpdateJSON + for _, r := range updatable { + items = append(items, agentUpdateJSON{ + Name: r.Name, + Source: r.Source, + Status: r.Status, + }) + } + output := struct { + Agents []agentUpdateJSON `json:"agents"` + DryRun bool `json:"dry_run"` + Duration string `json:"duration"` + }{ + Agents: items, + DryRun: dryRun, + Duration: formatDuration(start), + } + return writeJSONResult(&output, err) +} diff --git a/internal/trash/trash.go b/internal/trash/trash.go index fde6e651..bb979209 100644 --- a/internal/trash/trash.go +++ b/internal/trash/trash.go @@ -247,6 +247,51 @@ func Restore(entry *TrashEntry, destDir string) error { return nil } +// RestoreAgent restores agent files from a trashed directory back to the agent source. +// Unlike Restore (which moves the whole directory), this copies individual files +// from the trashed directory to destDir (since agents are file-based, not directory-based). +func RestoreAgent(entry *TrashEntry, destDir string) error { + if err := os.MkdirAll(destDir, 0755); err != nil { + return fmt.Errorf("failed to create agent destination: %w", err) + } + + // Read files from the trashed directory + entries, err := os.ReadDir(entry.Path) + if err != nil { + return fmt.Errorf("failed to read trashed agent: %w", err) + } + + for _, e := range entries { + if e.IsDir() { + continue + } + srcPath := filepath.Join(entry.Path, e.Name()) + destPath := filepath.Join(destDir, e.Name()) + + if _, statErr := os.Stat(destPath); statErr == nil { + return fmt.Errorf("'%s' already exists in %s", e.Name(), destDir) + } + + // Try rename, fallback to copy + if renameErr := os.Rename(srcPath, destPath); renameErr != nil { + data, readErr := os.ReadFile(srcPath) + if readErr != nil { + return fmt.Errorf("failed to read %s: %w", e.Name(), readErr) + } + if writeErr := os.WriteFile(destPath, data, 0644); writeErr != nil { + return fmt.Errorf("failed to write %s: %w", e.Name(), writeErr) + } + } + } + + // Remove the trashed directory + if removeErr := os.RemoveAll(entry.Path); removeErr != nil { + return fmt.Errorf("restored but failed to remove trash entry: %w", removeErr) + } + + return nil +} + // parseTrashName splits "skillname_YYYY-MM-DD_HH-MM-SS" into name and timestamp. func parseTrashName(dirName string) (string, string) { // Timestamp format: YYYY-MM-DD_HH-MM-SS (19 chars) diff --git a/tests/integration/agent_crud_test.go b/tests/integration/agent_crud_test.go new file mode 100644 index 00000000..2b4dceb1 --- /dev/null +++ b/tests/integration/agent_crud_test.go @@ -0,0 +1,259 @@ +//go:build !online + +package integration + +import ( + "os" + "path/filepath" + "testing" + + "skillshare/internal/testutil" +) + +// --- update agents --- + +func TestUpdate_Agents_NoAgents(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + agentsDir := createAgentSource(t, sb, nil) + _ = agentsDir + + sb.WriteConfig(`source: ` + sb.SourcePath + "\ntargets: {}\n") + + result := sb.RunCLI("update", "agents", "--all") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "No agents found") +} + +func TestUpdate_Agents_LocalOnly(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + createAgentSource(t, sb, map[string]string{ + "tutor.md": "# Tutor agent", + }) + sb.WriteConfig(`source: ` + sb.SourcePath + "\ntargets: {}\n") + + result := sb.RunCLI("update", "agents", "--all") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "local") +} + +func TestUpdate_Agents_GroupNotSupported(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + createAgentSource(t, sb, map[string]string{ + "tutor.md": "# Tutor agent", + }) + sb.WriteConfig(`source: ` + sb.SourcePath + "\ntargets: {}\n") + + result := sb.RunCLI("update", "agents", "--group", "mygroup") + result.AssertFailure(t) + result.AssertAnyOutputContains(t, "not supported for agents") +} + +func TestUpdate_Agents_RequiresNameOrAll(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + createAgentSource(t, sb, map[string]string{ + "tutor.md": "# Tutor agent", + }) + sb.WriteConfig(`source: ` + sb.SourcePath + "\ntargets: {}\n") + + result := sb.RunCLI("update", "agents") + result.AssertFailure(t) + result.AssertAnyOutputContains(t, "specify agent name") +} + +// --- uninstall agents --- + +func TestUninstall_Agents_RemovesToTrash(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + agentsDir := createAgentSource(t, sb, map[string]string{ + "tutor.md": "# Tutor agent", + }) + sb.WriteConfig(`source: ` + sb.SourcePath + "\ntargets: {}\n") + + result := sb.RunCLI("uninstall", "-g", "agents", "tutor", "--force") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "Removed agent") + result.AssertAnyOutputContains(t, "tutor") + + // Verify agent file was removed from source + if _, err := os.Stat(filepath.Join(agentsDir, "tutor.md")); !os.IsNotExist(err) { + t.Error("agent file should be removed from source") + } +} + +func TestUninstall_Agents_NotFound(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + createAgentSource(t, sb, nil) + sb.WriteConfig(`source: ` + sb.SourcePath + "\ntargets: {}\n") + + result := sb.RunCLI("uninstall", "-g", "agents", "nonexistent", "--force") + result.AssertFailure(t) + result.AssertAnyOutputContains(t, "not found") +} + +func TestUninstall_Agents_All(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + agentsDir := createAgentSource(t, sb, map[string]string{ + "tutor.md": "# Tutor agent", + "reviewer.md": "# Reviewer agent", + }) + sb.WriteConfig(`source: ` + sb.SourcePath + "\ntargets: {}\n") + + result := sb.RunCLI("uninstall", "-g", "agents", "--all", "--force") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "2 agent(s) removed") + + // Verify both files removed + if _, err := os.Stat(filepath.Join(agentsDir, "tutor.md")); !os.IsNotExist(err) { + t.Error("tutor.md should be removed") + } + if _, err := os.Stat(filepath.Join(agentsDir, "reviewer.md")); !os.IsNotExist(err) { + t.Error("reviewer.md should be removed") + } +} + +// --- collect agents --- + +func TestCollect_Agents_NoLocalAgents(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + createAgentSource(t, sb, map[string]string{ + "tutor.md": "# Tutor agent", + }) + claudeAgents := createAgentTarget(t, sb, "claude") + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + skills: + path: ` + sb.CreateTarget("claude") + ` + agents: + path: ` + claudeAgents + ` +`) + + // Sync agents first (creates symlinks) + sb.RunCLI("sync", "agents") + + // Collect should find no local (non-symlinked) agents + result := sb.RunCLI("collect", "agents") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "No local agents") +} + +func TestCollect_Agents_CollectsLocalFiles(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + createAgentSource(t, sb, nil) + claudeAgents := createAgentTarget(t, sb, "claude") + + // Create a local (non-symlinked) agent in the target + os.WriteFile(filepath.Join(claudeAgents, "local-agent.md"), []byte("# Local agent"), 0644) + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + skills: + path: ` + sb.CreateTarget("claude") + ` + agents: + path: ` + claudeAgents + ` +`) + + agentsSource := filepath.Join(filepath.Dir(sb.SourcePath), "agents") + + result := sb.RunCLI("collect", "agents") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "collected") + + // Verify the file was copied to agent source + if _, err := os.Stat(filepath.Join(agentsSource, "local-agent.md")); err != nil { + t.Error("local-agent.md should be collected to agent source") + } +} + +// --- trash agents --- + +func TestTrash_Agents_ListEmpty(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + sb.WriteConfig(`source: ` + sb.SourcePath + "\ntargets: {}\n") + + result := sb.RunCLI("trash", "agents", "list", "--no-tui") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "empty") +} + +func TestTrash_Agents_ListAfterUninstall(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + createAgentSource(t, sb, map[string]string{ + "tutor.md": "# Tutor agent", + }) + sb.WriteConfig(`source: ` + sb.SourcePath + "\ntargets: {}\n") + + // Uninstall to trash + sb.RunCLI("uninstall", "-g", "agents", "tutor", "--force") + + // List agent trash + result := sb.RunCLI("trash", "agents", "list", "--no-tui") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "tutor") +} + +func TestTrash_Agents_Restore(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + agentsDir := createAgentSource(t, sb, map[string]string{ + "tutor.md": "# Tutor agent", + }) + sb.WriteConfig(`source: ` + sb.SourcePath + "\ntargets: {}\n") + + // Uninstall + sb.RunCLI("uninstall", "-g", "agents", "tutor", "--force") + + // Verify removed + if _, err := os.Stat(filepath.Join(agentsDir, "tutor.md")); !os.IsNotExist(err) { + t.Fatal("should be removed after uninstall") + } + + // Restore + result := sb.RunCLI("trash", "agents", "restore", "tutor") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "Restored") + + // Verify restored to agent source + if _, err := os.Stat(filepath.Join(agentsDir, "tutor.md")); err != nil { + t.Error("tutor.md should be restored to agent source") + } +} + +// --- default behavior unchanged --- + +func TestTrash_Default_SkillsOnly(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + sb.WriteConfig(`source: ` + sb.SourcePath + "\ntargets: {}\n") + + // Default trash list should check skill trash (not agent trash) + result := sb.RunCLI("trash", "list", "--no-tui") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "empty") +} diff --git a/tests/integration/agent_list_sync_test.go b/tests/integration/agent_list_sync_test.go new file mode 100644 index 00000000..d5da5a46 --- /dev/null +++ b/tests/integration/agent_list_sync_test.go @@ -0,0 +1,290 @@ +//go:build !online + +package integration + +import ( + "os" + "path/filepath" + "testing" + + "skillshare/internal/testutil" +) + +// createAgentSource creates an agents source directory with the given agents. +// Each key is the filename (e.g., "tutor.md"), value is the content. +func createAgentSource(t *testing.T, sb *testutil.Sandbox, agents map[string]string) string { + t.Helper() + agentsDir := filepath.Join(filepath.Dir(sb.SourcePath), "agents") + if err := os.MkdirAll(agentsDir, 0755); err != nil { + t.Fatalf("failed to create agents dir: %v", err) + } + for name, content := range agents { + if err := os.WriteFile(filepath.Join(agentsDir, name), []byte(content), 0644); err != nil { + t.Fatalf("failed to write agent %s: %v", name, err) + } + } + return agentsDir +} + +// createAgentTarget creates an agent target directory for the given target name. +func createAgentTarget(t *testing.T, sb *testutil.Sandbox, name string) string { + t.Helper() + var path string + switch name { + case "claude": + path = filepath.Join(sb.Home, ".claude", "agents") + case "cursor": + path = filepath.Join(sb.Home, ".cursor", "agents") + case "opencode": + path = filepath.Join(sb.Home, ".config", "opencode", "agents") + default: + path = filepath.Join(sb.Home, "."+name, "agents") + } + if err := os.MkdirAll(path, 0755); err != nil { + t.Fatalf("failed to create agent target: %v", err) + } + return path +} + +// --- list agents --- + +func TestList_Agents_Empty(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + createAgentSource(t, sb, nil) + sb.WriteConfig(`source: ` + sb.SourcePath + "\ntargets: {}\n") + + result := sb.RunCLI("list", "agents", "--no-tui") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "No agents installed") +} + +func TestList_Agents_ShowsAgents(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + createAgentSource(t, sb, map[string]string{ + "tutor.md": "# Tutor agent", + "reviewer.md": "# Reviewer agent", + }) + sb.WriteConfig(`source: ` + sb.SourcePath + "\ntargets: {}\n") + + result := sb.RunCLI("list", "agents", "--no-tui") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "tutor") + result.AssertAnyOutputContains(t, "reviewer") + result.AssertAnyOutputContains(t, "Installed agents") +} + +func TestList_Agents_JSON_IncludesKind(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + createAgentSource(t, sb, map[string]string{ + "tutor.md": "# Tutor agent", + }) + sb.WriteConfig(`source: ` + sb.SourcePath + "\ntargets: {}\n") + + result := sb.RunCLI("list", "agents", "--json") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, `"kind"`) + result.AssertAnyOutputContains(t, `"agent"`) + result.AssertAnyOutputContains(t, `"tutor"`) +} + +func TestList_All_MixedOutput(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + sb.CreateSkill("my-skill", map[string]string{ + "SKILL.md": "---\nname: my-skill\n---\n# Content", + }) + createAgentSource(t, sb, map[string]string{ + "tutor.md": "# Tutor agent", + }) + sb.WriteConfig(`source: ` + sb.SourcePath + "\ntargets: {}\n") + + result := sb.RunCLI("list", "all", "--json") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, `"skill"`) + result.AssertAnyOutputContains(t, `"agent"`) +} + +func TestList_Default_SkillsOnly(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + sb.CreateSkill("my-skill", map[string]string{ + "SKILL.md": "---\nname: my-skill\n---\n# Content", + }) + createAgentSource(t, sb, map[string]string{ + "tutor.md": "# Tutor agent", + }) + sb.WriteConfig(`source: ` + sb.SourcePath + "\ntargets: {}\n") + + // Default list should NOT include agents + result := sb.RunCLI("list", "--json") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, `"my-skill"`) + result.AssertOutputNotContains(t, `"tutor"`) +} + +// --- sync agents --- + +func TestSync_Agents_CreatesSymlinks(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + createAgentSource(t, sb, map[string]string{ + "tutor.md": "# Tutor agent", + }) + claudeAgents := createAgentTarget(t, sb, "claude") + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + skills: + path: ` + sb.CreateTarget("claude") + ` + agents: + path: ` + claudeAgents + ` +`) + + result := sb.RunCLI("sync", "agents") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "Syncing agents") + + // Verify symlink was created + linkPath := filepath.Join(claudeAgents, "tutor.md") + if _, err := os.Lstat(linkPath); err != nil { + t.Errorf("expected agent symlink at %s, got error: %v", linkPath, err) + } +} + +func TestSync_Agents_DryRun(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + createAgentSource(t, sb, map[string]string{ + "tutor.md": "# Tutor agent", + }) + claudeAgents := createAgentTarget(t, sb, "claude") + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + skills: + path: ` + sb.CreateTarget("claude") + ` + agents: + path: ` + claudeAgents + ` +`) + + result := sb.RunCLI("sync", "agents", "--dry-run") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "Dry run") + + // Verify NO symlink was created + linkPath := filepath.Join(claudeAgents, "tutor.md") + if _, err := os.Lstat(linkPath); !os.IsNotExist(err) { + t.Error("expected no agent symlink in dry-run mode") + } +} + +func TestSync_Default_SkillsOnly_NoAgentSync(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + sb.CreateSkill("my-skill", map[string]string{ + "SKILL.md": "---\nname: my-skill\n---\n# Content", + }) + createAgentSource(t, sb, map[string]string{ + "tutor.md": "# Tutor agent", + }) + + claudeSkills := sb.CreateTarget("claude") + claudeAgents := createAgentTarget(t, sb, "claude") + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + skills: + path: ` + claudeSkills + ` + agents: + path: ` + claudeAgents + ` +`) + + // Default sync should only sync skills, NOT agents + result := sb.RunCLI("sync") + result.AssertSuccess(t) + result.AssertOutputNotContains(t, "Syncing agents") + + // Skill symlink should exist + if _, err := os.Lstat(filepath.Join(claudeSkills, "my-skill")); err != nil { + t.Error("expected skill symlink") + } + // Agent symlink should NOT exist + if _, err := os.Lstat(filepath.Join(claudeAgents, "tutor.md")); !os.IsNotExist(err) { + t.Error("expected no agent symlink from default sync") + } +} + +func TestSync_All_SyncsSkillsAndAgents(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + sb.CreateSkill("my-skill", map[string]string{ + "SKILL.md": "---\nname: my-skill\n---\n# Content", + }) + createAgentSource(t, sb, map[string]string{ + "tutor.md": "# Tutor agent", + }) + + claudeSkills := sb.CreateTarget("claude") + claudeAgents := createAgentTarget(t, sb, "claude") + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + skills: + path: ` + claudeSkills + ` + agents: + path: ` + claudeAgents + ` +`) + + result := sb.RunCLI("sync", "all") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "Syncing skills") + result.AssertAnyOutputContains(t, "Syncing agents") + + // Both should be synced + if _, err := os.Lstat(filepath.Join(claudeSkills, "my-skill")); err != nil { + t.Error("expected skill symlink") + } + if _, err := os.Lstat(filepath.Join(claudeAgents, "tutor.md")); err != nil { + t.Error("expected agent symlink") + } +} + +// --- parseKindArg "all" keyword --- + +func TestSync_All_PositionalKeyword(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + sb.CreateSkill("my-skill", map[string]string{ + "SKILL.md": "---\nname: my-skill\n---\n# Content", + }) + claudeSkills := sb.CreateTarget("claude") + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + skills: + path: ` + claudeSkills + ` +`) + + // "sync all" should still sync skills even without agents configured + result := sb.RunCLI("sync", "all") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "Syncing skills") +} diff --git a/tests/integration/install_agent_test.go b/tests/integration/install_agent_test.go index 39547e8b..fe964b0e 100644 --- a/tests/integration/install_agent_test.go +++ b/tests/integration/install_agent_test.go @@ -116,7 +116,7 @@ targets: {} } } -func TestUninstall_KindAgent_ParsesFlag(t *testing.T) { +func TestUninstall_AgentsPositional_ParsesCorrectly(t *testing.T) { sb := testutil.NewSandbox(t) defer sb.Cleanup() @@ -124,8 +124,7 @@ func TestUninstall_KindAgent_ParsesFlag(t *testing.T) { targets: {} `) - // --kind agent with nonexistent agent — should error with agent not found - result := sb.RunCLI("uninstall", "-g", "--kind", "agent", "nonexistent") - // May fail because agent doesn't exist, but should parse flags correctly (no "unknown option") + // Positional "agents" with nonexistent agent — should parse correctly (no "unknown option") + result := sb.RunCLI("uninstall", "-g", "agents", "nonexistent") result.AssertOutputNotContains(t, "unknown option") } diff --git a/tests/integration/log_test.go b/tests/integration/log_test.go index 225f75cc..481e609d 100644 --- a/tests/integration/log_test.go +++ b/tests/integration/log_test.go @@ -252,16 +252,18 @@ func TestLog_SyncPartialStatus(t *testing.T) { goodTarget := sb.CreateTarget("claude") - // Create the broken target as a valid directory (passes validation), - // then make it read-only so sync fails when trying to write symlinks. - brokenTarget := filepath.Join(sb.Home, "broken-target", "skills") - if err := os.MkdirAll(brokenTarget, 0755); err != nil { - t.Fatalf("failed to create broken target: %v", err) + // Create a broken target that passes validation but fails during sync. + // A dangling symlink makes os.Stat return "not exist" (validation passes) + // but os.MkdirAll fails because the symlink entry blocks directory creation. + // This works even as root (unlike chmod-based approaches). + brokenParent := filepath.Join(sb.Home, "broken-target") + if err := os.MkdirAll(brokenParent, 0755); err != nil { + t.Fatalf("failed to create broken parent: %v", err) } - if err := os.Chmod(brokenTarget, 0444); err != nil { - t.Fatalf("failed to chmod broken target: %v", err) + brokenTarget := filepath.Join(brokenParent, "skills") + if err := os.Symlink("/nonexistent/dangling/target", brokenTarget); err != nil { + t.Fatalf("failed to create dangling symlink: %v", err) } - t.Cleanup(func() { os.Chmod(brokenTarget, 0755) }) sb.WriteConfig(`source: ` + sb.SourcePath + ` mode: merge From c2b9a1537db013c27ecf4f855e95e66de7c6221b Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 6 Apr 2026 10:47:46 +0800 Subject: [PATCH 046/205] feat(agents): wire kind filter into status, diff, doctor (Phase 3) status: - "status agents" shows agent source + per-target agent status (path, linked, drift) - "status all" shows both skills and agents - JSON output includes "agents" section with source/count/targets - Default "status" unchanged (skills only) diff: - Kind field populated in JSON output ("skill" or "agent") - parseKindArg wired into cmdDiff dispatch - diffItemToJSON helper centralizes Kind defaulting doctor: - New checkAgentTargets: validates agent target paths, detects broken symlinks and drift (expected vs linked count) - Checks run after existing agent source check 8 new integration tests, all passing. --- cmd/skillshare/diff.go | 38 ++-- cmd/skillshare/diff_project.go | 2 +- cmd/skillshare/doctor.go | 2 + cmd/skillshare/doctor_agents.go | 110 ++++++++++ cmd/skillshare/status.go | 138 +++++++------ cmd/skillshare/status_agents.go | 123 +++++++++++ tests/integration/agent_observability_test.go | 191 ++++++++++++++++++ 7 files changed, 527 insertions(+), 77 deletions(-) create mode 100644 cmd/skillshare/doctor_agents.go create mode 100644 cmd/skillshare/status_agents.go create mode 100644 tests/integration/agent_observability_test.go diff --git a/cmd/skillshare/diff.go b/cmd/skillshare/diff.go index 9ccc172d..af031b31 100644 --- a/cmd/skillshare/diff.go +++ b/cmd/skillshare/diff.go @@ -73,6 +73,9 @@ func cmdDiff(args []string) error { applyModeLabel(mode) + // Extract kind filter (e.g. "skillshare diff agents"). + kind, rest := parseKindArg(rest) + scope := "global" cfgPath := config.ConfigPath() if mode == modeProject { @@ -105,9 +108,9 @@ func cmdDiff(args []string) error { var cmdErr error if mode == modeProject { - cmdErr = cmdDiffProject(cwd, targetName, opts, start) + cmdErr = cmdDiffProject(cwd, targetName, kind, opts, start) } else { - cmdErr = cmdDiffGlobal(targetName, opts, start) + cmdErr = cmdDiffGlobal(targetName, kind, opts, start) } logDiffOp(cfgPath, targetName, scope, 0, start, cmdErr) return cmdErr @@ -147,6 +150,7 @@ type targetDiffResult struct { type copyDiffEntry struct { action string // "add", "modify", "remove" name string + kind string // "skill" or "agent" (empty defaults to "skill") reason string isSync bool // true = needs sync, false = local-only files []fileDiffEntry // file-level diffs (nil until populated) @@ -358,7 +362,7 @@ func (dp *diffProgress) stop() { } } -func cmdDiffGlobal(targetName string, opts diffRenderOpts, start time.Time) error { +func cmdDiffGlobal(targetName string, kind resourceKindFilter, opts diffRenderOpts, start time.Time) error { cfg, err := config.Load() if err != nil { return err @@ -483,6 +487,20 @@ func cmdDiffGlobal(targetName string, opts diffRenderOpts, start time.Time) erro return nil } +func diffItemToJSON(item copyDiffEntry) diffJSONItem { + k := item.kind + if k == "" { + k = "skill" + } + return diffJSONItem{ + Action: item.action, + Name: item.name, + Kind: k, + Reason: item.reason, + IsSync: item.isSync, + } +} + func diffOutputJSON(results []targetDiffResult, start time.Time) error { output := diffJSONOutput{ Duration: formatDuration(start), @@ -497,12 +515,7 @@ func diffOutputJSON(results []targetDiffResult, start time.Time) error { Exclude: r.exclude, } for _, item := range r.items { - jt.Items = append(jt.Items, diffJSONItem{ - Action: item.action, - Name: item.name, - Reason: item.reason, - IsSync: item.isSync, - }) + jt.Items = append(jt.Items, diffItemToJSON(item)) } output.Targets = append(output.Targets, jt) } @@ -529,12 +542,7 @@ func diffOutputJSONWithExtras(results []targetDiffResult, extrasResults []extraD Exclude: r.exclude, } for _, item := range r.items { - jt.Items = append(jt.Items, diffJSONItem{ - Action: item.action, - Name: item.name, - Reason: item.reason, - IsSync: item.isSync, - }) + jt.Items = append(jt.Items, diffItemToJSON(item)) } o.Targets = append(o.Targets, jt) } diff --git a/cmd/skillshare/diff_project.go b/cmd/skillshare/diff_project.go index 689d93c7..dd95ef4f 100644 --- a/cmd/skillshare/diff_project.go +++ b/cmd/skillshare/diff_project.go @@ -10,7 +10,7 @@ import ( "skillshare/internal/ui" ) -func cmdDiffProject(root, targetName string, opts diffRenderOpts, start time.Time) error { +func cmdDiffProject(root, targetName string, kind resourceKindFilter, opts diffRenderOpts, start time.Time) error { if !projectConfigExists(root) { if err := performProjectInit(root, projectInitOptions{}); err != nil { return err diff --git a/cmd/skillshare/doctor.go b/cmd/skillshare/doctor.go index 9700e57a..e238bc47 100644 --- a/cmd/skillshare/doctor.go +++ b/cmd/skillshare/doctor.go @@ -228,6 +228,8 @@ func runDoctorChecks(cfg *config.Config, result *doctorResult, isProject bool) { checkGitStatus(cfg.Source, result) } + checkAgentTargets(cfg, result) + fmt.Println() // visual break before skill validation checkSkillsValidity(cfg.Source, result, discovered) checkSkillIntegrity(result, discovered) diff --git a/cmd/skillshare/doctor_agents.go b/cmd/skillshare/doctor_agents.go new file mode 100644 index 00000000..6e7722fc --- /dev/null +++ b/cmd/skillshare/doctor_agents.go @@ -0,0 +1,110 @@ +package main + +import ( + "fmt" + "os" + "path/filepath" + "strings" + + "skillshare/internal/config" + "skillshare/internal/resource" + "skillshare/internal/ui" +) + +// checkAgentTargets validates agent target paths, broken links, and drift. +func checkAgentTargets(cfg *config.Config, result *doctorResult) { + agentsSource := cfg.EffectiveAgentsSource() + if _, err := os.Stat(agentsSource); err != nil { + return // no agents source → skip target checks + } + + agents, discErr := resource.AgentKind{}.Discover(agentsSource) + if discErr != nil || len(agents) == 0 { + return + } + + builtinAgents := config.DefaultAgentTargets() + + for name := range cfg.Targets { + agentPath := resolveAgentTargetPath(cfg.Targets[name], builtinAgents, name) + if agentPath == "" { + continue + } + + info, err := os.Stat(agentPath) + if err != nil { + if os.IsNotExist(err) { + ui.Info("Agent target %s: %s (not created)", name, agentPath) + result.addCheck("agent_target_"+name, checkPass, + fmt.Sprintf("Agent target %s: not created yet", name), nil) + continue + } + ui.Error("Agent target %s: %v", name, err) + result.addError() + result.addCheck("agent_target_"+name, checkError, + fmt.Sprintf("Agent target %s: %v", name, err), nil) + continue + } + + if !info.IsDir() { + ui.Error("Agent target %s: not a directory: %s", name, agentPath) + result.addError() + result.addCheck("agent_target_"+name, checkError, + fmt.Sprintf("Agent target %s: path is not a directory", name), nil) + continue + } + + // Count linked agents and check for broken links + linked, broken := countAgentLinksAndBroken(agentPath) + if broken > 0 { + ui.Warning("Agent target %s: %d broken link(s)", name, broken) + result.addWarning() + result.addCheck("agent_target_"+name, checkWarning, + fmt.Sprintf("Agent target %s: %d linked, %d broken", name, linked, broken), nil) + continue + } + + if linked != len(agents) { + ui.Warning("Agent target %s: drift (%d/%d linked)", name, linked, len(agents)) + result.addWarning() + result.addCheck("agent_target_"+name, checkWarning, + fmt.Sprintf("Agent target %s: drift (%d/%d agents linked)", name, linked, len(agents)), nil) + continue + } + + ui.Success("Agent target %s: %s (%d agents)", name, agentPath, linked) + result.addCheck("agent_target_"+name, checkPass, + fmt.Sprintf("Agent target %s: %d agents synced", name, linked), nil) + } +} + +// countAgentLinksAndBroken counts .md symlinks and broken symlinks in a directory. +func countAgentLinksAndBroken(dir string) (linked, broken int) { + entries, err := os.ReadDir(dir) + if err != nil { + return 0, 0 + } + for _, e := range entries { + if e.IsDir() { + continue + } + if !strings.HasSuffix(strings.ToLower(e.Name()), ".md") { + continue + } + fullPath := filepath.Join(dir, e.Name()) + fi, lErr := os.Lstat(fullPath) + if lErr != nil { + continue + } + if fi.Mode()&os.ModeSymlink == 0 { + continue + } + // It's a symlink — check if target exists + if _, statErr := os.Stat(fullPath); statErr != nil { + broken++ + } else { + linked++ + } + } + return linked, broken +} diff --git a/cmd/skillshare/status.go b/cmd/skillshare/status.go index 1412d4a7..7016beed 100644 --- a/cmd/skillshare/status.go +++ b/cmd/skillshare/status.go @@ -19,12 +19,13 @@ import ( // statusJSONOutput is the JSON representation for status --json output. type statusJSONOutput struct { - Source statusJSONSource `json:"source"` - SkillCount int `json:"skill_count"` - TrackedRepos []statusJSONRepo `json:"tracked_repos"` - Targets []statusJSONTarget `json:"targets"` - Audit statusJSONAudit `json:"audit"` - Version string `json:"version"` + Source statusJSONSource `json:"source"` + SkillCount int `json:"skill_count"` + TrackedRepos []statusJSONRepo `json:"tracked_repos"` + Targets []statusJSONTarget `json:"targets"` + Agents *statusJSONAgents `json:"agents,omitempty"` + Audit statusJSONAudit `json:"audit"` + Version string `json:"version"` } type statusJSONSource struct { @@ -90,6 +91,9 @@ func cmdStatus(args []string) error { applyModeLabel(mode) + // Extract kind filter (e.g. "skillshare status agents"). + kind, rest := parseKindArg(rest) + jsonOutput := hasFlag(rest, "--json") if mode == modeProject { @@ -111,78 +115,90 @@ func cmdStatus(args []string) error { } if !jsonOutput { - sp := ui.StartSpinner("Discovering skills...") - discovered, stats, discoverErr := sync.DiscoverSourceSkillsWithStats(cfg.Source) - if discoverErr != nil { - discovered = nil - } - trackedRepos := extractTrackedRepos(discovered) - sp.Stop() + if kind.IncludesSkills() { + sp := ui.StartSpinner("Discovering skills...") + discovered, stats, discoverErr := sync.DiscoverSourceSkillsWithStats(cfg.Source) + if discoverErr != nil { + discovered = nil + } + trackedRepos := extractTrackedRepos(discovered) + sp.Stop() - printSourceStatus(cfg, len(discovered), stats) - printTrackedReposStatus(cfg, discovered, trackedRepos) - if err := printTargetsStatus(cfg, discovered); err != nil { - return err + printSourceStatus(cfg, len(discovered), stats) + printTrackedReposStatus(cfg, discovered, trackedRepos) + if err := printTargetsStatus(cfg, discovered); err != nil { + return err + } + + // Extras + if len(cfg.Extras) > 0 { + ui.Header("Extras") + printExtrasStatus(cfg.Extras, func(extra config.ExtraConfig) string { + return config.ResolveExtrasSourceDir(extra, cfg.ExtrasSource, cfg.Source) + }) + } + + printAuditStatus(cfg.Audit) } - // Extras - if len(cfg.Extras) > 0 { - ui.Header("Extras") - printExtrasStatus(cfg.Extras, func(extra config.ExtraConfig) string { - return config.ResolveExtrasSourceDir(extra, cfg.ExtrasSource, cfg.Source) - }) + if kind.IncludesAgents() { + printAgentStatus(cfg) } - printAuditStatus(cfg.Audit) - checkSkillVersion(cfg) + if kind.IncludesSkills() { + checkSkillVersion(cfg) + } return nil } // JSON mode - discovered, stats, _ := sync.DiscoverSourceSkillsWithStats(cfg.Source) - trackedRepos := extractTrackedRepos(discovered) - output := statusJSONOutput{ - Source: statusJSONSource{ + Version: version, + } + + if kind.IncludesSkills() { + discovered, stats, _ := sync.DiscoverSourceSkillsWithStats(cfg.Source) + trackedRepos := extractTrackedRepos(discovered) + + output.Source = statusJSONSource{ Path: cfg.Source, Exists: dirExists(cfg.Source), Skillignore: buildSkillignoreJSON(stats), - }, - SkillCount: len(discovered), - Version: version, - } - - // Tracked repos (parallel dirty checks) - output.TrackedRepos = buildTrackedRepoJSON(cfg.Source, trackedRepos, discovered) + } + output.SkillCount = len(discovered) + output.TrackedRepos = buildTrackedRepoJSON(cfg.Source, trackedRepos, discovered) + + for name, target := range cfg.Targets { + sc := target.SkillsConfig() + tMode := getTargetMode(sc.Mode, cfg.Mode) + res := getTargetStatusDetail(target, cfg.Source, tMode) + output.Targets = append(output.Targets, statusJSONTarget{ + Name: name, + Path: sc.Path, + Mode: tMode, + Status: res.statusStr, + SyncedCount: res.syncedCount, + Include: sc.Include, + Exclude: sc.Exclude, + }) + } - // Targets - for name, target := range cfg.Targets { - sc := target.SkillsConfig() - tMode := getTargetMode(sc.Mode, cfg.Mode) - res := getTargetStatusDetail(target, cfg.Source, tMode) - output.Targets = append(output.Targets, statusJSONTarget{ - Name: name, - Path: sc.Path, - Mode: tMode, - Status: res.statusStr, - SyncedCount: res.syncedCount, - Include: sc.Include, - Exclude: sc.Exclude, + policy := audit.ResolvePolicy(audit.PolicyInputs{ + ConfigProfile: cfg.Audit.Profile, + ConfigThreshold: cfg.Audit.BlockThreshold, + ConfigDedupe: cfg.Audit.DedupeMode, + ConfigAnalyzers: cfg.Audit.EnabledAnalyzers, }) + output.Audit = statusJSONAudit{ + Profile: string(policy.Profile), + Threshold: policy.Threshold, + Dedupe: string(policy.DedupeMode), + Analyzers: policy.EffectiveAnalyzers(), + } } - // Audit - policy := audit.ResolvePolicy(audit.PolicyInputs{ - ConfigProfile: cfg.Audit.Profile, - ConfigThreshold: cfg.Audit.BlockThreshold, - ConfigDedupe: cfg.Audit.DedupeMode, - ConfigAnalyzers: cfg.Audit.EnabledAnalyzers, - }) - output.Audit = statusJSONAudit{ - Profile: string(policy.Profile), - Threshold: policy.Threshold, - Dedupe: string(policy.DedupeMode), - Analyzers: policy.EffectiveAnalyzers(), + if kind.IncludesAgents() { + output.Agents = buildAgentStatusJSON(cfg) } return writeJSON(&output) diff --git a/cmd/skillshare/status_agents.go b/cmd/skillshare/status_agents.go new file mode 100644 index 00000000..eb1ed8f2 --- /dev/null +++ b/cmd/skillshare/status_agents.go @@ -0,0 +1,123 @@ +package main + +import ( + "os" + "path/filepath" + "strings" + + "skillshare/internal/config" + "skillshare/internal/resource" + "skillshare/internal/ui" +) + +// statusJSONAgents is the agent section of status --json output. +type statusJSONAgents struct { + Source string `json:"source"` + Exists bool `json:"exists"` + Count int `json:"count"` + Targets []statusJSONAgentTarget `json:"targets,omitempty"` +} + +type statusJSONAgentTarget struct { + Name string `json:"name"` + Path string `json:"path"` + Expected int `json:"expected"` + Linked int `json:"linked"` + Drift bool `json:"drift"` +} + +// printAgentStatus prints agent source and per-target agent status (text mode). +func printAgentStatus(cfg *config.Config) { + agentsSource := cfg.EffectiveAgentsSource() + + ui.Header("Agents") + + exists := dirExists(agentsSource) + if !exists { + ui.Info("Source: %s (not created)", agentsSource) + return + } + + agents, _ := resource.AgentKind{}.Discover(agentsSource) + ui.Info("Source: %s (%d agents)", agentsSource, len(agents)) + + // Per-target agent status + builtinAgents := config.DefaultAgentTargets() + var targets []string + for name := range cfg.Targets { + targets = append(targets, name) + } + + for _, name := range targets { + agentPath := resolveAgentTargetPath(cfg.Targets[name], builtinAgents, name) + if agentPath == "" { + continue + } + + linked := countLinkedAgents(agentPath) + driftLabel := "" + if linked != len(agents) && len(agents) > 0 { + driftLabel = ui.Yellow + " (drift)" + ui.Reset + } + ui.Info(" %s: %s (%d/%d linked)%s", name, agentPath, linked, len(agents), driftLabel) + } +} + +// buildAgentStatusJSON builds the agents section for status --json output. +func buildAgentStatusJSON(cfg *config.Config) *statusJSONAgents { + agentsSource := cfg.EffectiveAgentsSource() + exists := dirExists(agentsSource) + + result := &statusJSONAgents{ + Source: agentsSource, + Exists: exists, + } + + if !exists { + return result + } + + agents, _ := resource.AgentKind{}.Discover(agentsSource) + result.Count = len(agents) + + builtinAgents := config.DefaultAgentTargets() + for name := range cfg.Targets { + agentPath := resolveAgentTargetPath(cfg.Targets[name], builtinAgents, name) + if agentPath == "" { + continue + } + + linked := countLinkedAgents(agentPath) + result.Targets = append(result.Targets, statusJSONAgentTarget{ + Name: name, + Path: agentPath, + Expected: len(agents), + Linked: linked, + Drift: linked != len(agents) && len(agents) > 0, + }) + } + + return result +} + +// countLinkedAgents counts .md symlinks in the target agent directory. +func countLinkedAgents(targetDir string) int { + entries, err := os.ReadDir(targetDir) + if err != nil { + return 0 + } + count := 0 + for _, e := range entries { + if e.IsDir() { + continue + } + if !strings.HasSuffix(strings.ToLower(e.Name()), ".md") { + continue + } + fullPath := filepath.Join(targetDir, e.Name()) + if info, lErr := os.Lstat(fullPath); lErr == nil && info.Mode()&os.ModeSymlink != 0 { + count++ + } + } + return count +} diff --git a/tests/integration/agent_observability_test.go b/tests/integration/agent_observability_test.go new file mode 100644 index 00000000..44af886e --- /dev/null +++ b/tests/integration/agent_observability_test.go @@ -0,0 +1,191 @@ +//go:build !online + +package integration + +import ( + "os" + "path/filepath" + "testing" + + "skillshare/internal/testutil" +) + +// --- status agents --- + +func TestStatus_Agents_ShowsAgentInfo(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + createAgentSource(t, sb, map[string]string{ + "tutor.md": "# Tutor agent", + "reviewer.md": "# Reviewer agent", + }) + claudeAgents := createAgentTarget(t, sb, "claude") + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + skills: + path: ` + sb.CreateTarget("claude") + ` + agents: + path: ` + claudeAgents + ` +`) + + result := sb.RunCLI("status", "agents") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "Agents") + result.AssertAnyOutputContains(t, "2 agents") +} + +func TestStatus_Agents_JSON_IncludesAgents(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + createAgentSource(t, sb, map[string]string{ + "tutor.md": "# Tutor agent", + }) + + sb.WriteConfig(`source: ` + sb.SourcePath + "\ntargets: {}\n") + + result := sb.RunCLI("status", "agents", "--json") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, `"agents"`) + result.AssertAnyOutputContains(t, `"count"`) +} + +func TestStatus_Default_NoAgentSection(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + sb.CreateSkill("my-skill", map[string]string{ + "SKILL.md": "---\nname: my-skill\n---\n# Content", + }) + createAgentSource(t, sb, map[string]string{ + "tutor.md": "# Tutor agent", + }) + sb.WriteConfig(`source: ` + sb.SourcePath + "\ntargets: {}\n") + + result := sb.RunCLI("status") + result.AssertSuccess(t) + // Default status should NOT include agent section + result.AssertOutputNotContains(t, "Agents") +} + +func TestStatus_All_ShowsBoth(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + sb.CreateSkill("my-skill", map[string]string{ + "SKILL.md": "---\nname: my-skill\n---\n# Content", + }) + createAgentSource(t, sb, map[string]string{ + "tutor.md": "# Tutor agent", + }) + sb.WriteConfig(`source: ` + sb.SourcePath + "\ntargets: {}\n") + + result := sb.RunCLI("status", "all") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "Source") // skill section + result.AssertAnyOutputContains(t, "Agents") // agent section +} + +// --- diff agents --- + +func TestDiff_Agents_JSON_IncludesKind(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + sb.CreateSkill("my-skill", map[string]string{ + "SKILL.md": "---\nname: my-skill\n---\n# Content", + }) + claudeSkills := sb.CreateTarget("claude") + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + skills: + path: ` + claudeSkills + ` +`) + + // Diff before sync should show items with kind field + result := sb.RunCLI("diff", "--json") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, `"kind"`) + result.AssertAnyOutputContains(t, `"skill"`) +} + +// --- doctor agents --- + +func TestDoctor_ChecksAgentSource(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + createAgentSource(t, sb, map[string]string{ + "tutor.md": "# Tutor agent", + }) + sb.WriteConfig(`source: ` + sb.SourcePath + "\ntargets: {}\n") + + result := sb.RunCLI("doctor") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "Agents source") + result.AssertAnyOutputContains(t, "1 agents") +} + +func TestDoctor_AgentTargetDrift(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + createAgentSource(t, sb, map[string]string{ + "tutor.md": "# Tutor agent", + "reviewer.md": "# Reviewer agent", + }) + claudeAgents := createAgentTarget(t, sb, "claude") + + // Only sync one agent manually (create symlink for tutor only) + agentsDir := filepath.Join(filepath.Dir(sb.SourcePath), "agents") + os.Symlink( + filepath.Join(agentsDir, "tutor.md"), + filepath.Join(claudeAgents, "tutor.md"), + ) + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + skills: + path: ` + sb.CreateTarget("claude") + ` + agents: + path: ` + claudeAgents + ` +`) + + result := sb.RunCLI("doctor") + result.AssertSuccess(t) + // Should detect drift (1/2 linked) + result.AssertAnyOutputContains(t, "drift") +} + +func TestDoctor_AgentTargetSynced(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + createAgentSource(t, sb, map[string]string{ + "tutor.md": "# Tutor agent", + }) + claudeAgents := createAgentTarget(t, sb, "claude") + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + skills: + path: ` + sb.CreateTarget("claude") + ` + agents: + path: ` + claudeAgents + ` +`) + + // Sync agents first + sb.RunCLI("sync", "agents") + + result := sb.RunCLI("doctor") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "1 agents") + result.AssertOutputNotContains(t, "drift") +} From 95c46ca8223ac8e7c1ab99383c5b42557c72a01f Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 6 Apr 2026 10:56:22 +0800 Subject: [PATCH 047/205] feat(agents): wire backup/restore and update CLI help text (Phase 4) backup/restore: - "backup agents" backs up agent target paths (entry name: -agents) - "restore agents " restores from agent backup - Default backup unchanged (skills only) help text: - Updated 9 commands with agents positional in Usage and Examples: list, sync, update, uninstall, collect, trash, backup, restore, status, diff 5 new integration tests (backup/restore round-trip, dry-run, default unchanged, project mode rejected), all passing. --- cmd/skillshare/backup.go | 28 ++++- cmd/skillshare/backup_agents.go | 94 +++++++++++++++++ cmd/skillshare/collect.go | 5 +- cmd/skillshare/diff.go | 5 +- cmd/skillshare/list.go | 6 +- cmd/skillshare/status.go | 6 +- cmd/skillshare/sync.go | 6 +- cmd/skillshare/trash.go | 6 +- cmd/skillshare/uninstall.go | 5 +- cmd/skillshare/update.go | 7 +- tests/integration/agent_backup_test.go | 135 +++++++++++++++++++++++++ 11 files changed, 284 insertions(+), 19 deletions(-) create mode 100644 cmd/skillshare/backup_agents.go create mode 100644 tests/integration/agent_backup_test.go diff --git a/cmd/skillshare/backup.go b/cmd/skillshare/backup.go index 3b6da1e1..7c056853 100644 --- a/cmd/skillshare/backup.go +++ b/cmd/skillshare/backup.go @@ -25,6 +25,9 @@ func cmdBackup(args []string) error { return fmt.Errorf("backup is not supported in project mode") } + // Extract kind filter (e.g. "skillshare backup agents"). + kind, args := parseKindArg(args) + start := time.Now() var targetName string doList := false @@ -63,7 +66,11 @@ func cmdBackup(args []string) error { return backupCleanup() } - err = createBackup(targetName, dryRun) + if kind == kindAgents { + err = createAgentBackup(targetName, dryRun) + } else { + err = createBackup(targetName, dryRun) + } if !dryRun { e := oplog.NewEntry("backup", statusFromErr(err), time.Since(start)) @@ -326,7 +333,11 @@ func cmdRestore(args []string) error { return fmt.Errorf("restore is not supported in project mode") } + // Extract kind filter (e.g. "skillshare restore agents"). + kind, args := parseKindArg(args) + start := time.Now() + _ = start // used below var targetName string var fromTimestamp string @@ -357,6 +368,11 @@ func cmdRestore(args []string) error { } } + // Agent restore uses agent-specific backup entries (name suffixed with "-agents") + if kind == kindAgents { + return restoreAgentBackup(targetName, fromTimestamp, force, dryRun) + } + // No target specified → TUI dispatch (or plain text fallback) if targetName == "" && fromTimestamp == "" && !dryRun { return restoreTUIDispatch(noTUI) @@ -537,7 +553,7 @@ func previewRestoreFromLatest(targetName, targetPath string, opts backup.Restore } func printBackupHelp() { - fmt.Println(`Usage: skillshare backup [target] [options] + fmt.Println(`Usage: skillshare backup [agents] [target] [options] Create a snapshot of target skill directories. Without arguments, backs up all targets. @@ -557,11 +573,12 @@ Examples: skillshare backup claude # Backup only claude skillshare backup --list # List all backups skillshare backup --cleanup # Remove old backups - skillshare backup --cleanup --dry-run # Preview cleanup`) + skillshare backup --cleanup --dry-run # Preview cleanup + skillshare backup agents # Backup all agent targets`) } func printRestoreHelp() { - fmt.Println(`Usage: skillshare restore [target] [options] + fmt.Println(`Usage: skillshare restore [agents] [target] [options] Restore target skills from a backup snapshot. Without arguments, launches an interactive TUI. @@ -581,5 +598,6 @@ Examples: skillshare restore claude # Restore claude from latest backup skillshare restore claude --from 2024-01-15_14-30-45 skillshare restore claude --dry-run # Preview restore - skillshare restore --no-tui # List backups (no TUI)`) + skillshare restore --no-tui # List backups (no TUI) + skillshare restore agents claude # Restore agents claude target`) } diff --git a/cmd/skillshare/backup_agents.go b/cmd/skillshare/backup_agents.go new file mode 100644 index 00000000..e1a3c67c --- /dev/null +++ b/cmd/skillshare/backup_agents.go @@ -0,0 +1,94 @@ +package main + +import ( + "fmt" + + "skillshare/internal/backup" + "skillshare/internal/config" + "skillshare/internal/ui" +) + +// createAgentBackup backs up agent target directories. +// Agent backups use "-agents" as the backup entry name. +func createAgentBackup(targetName string, dryRun bool) error { + cfg, err := config.Load() + if err != nil { + return err + } + + builtinAgents := config.DefaultAgentTargets() + ui.Header("Creating agent backup") + if dryRun { + ui.Warning("Dry run mode - no backups will be created") + } + + created := 0 + for name := range cfg.Targets { + if targetName != "" && name != targetName { + continue + } + + agentPath := resolveAgentTargetPath(cfg.Targets[name], builtinAgents, name) + if agentPath == "" { + continue + } + + entryName := name + "-agents" + + if dryRun { + ui.Info("%s: would backup agents from %s", entryName, agentPath) + continue + } + + backupPath, backupErr := backup.Create(entryName, agentPath) + if backupErr != nil { + ui.Warning("Failed to backup %s: %v", entryName, backupErr) + continue + } + if backupPath != "" { + ui.StepDone(entryName, backupPath) + created++ + } else { + ui.StepSkip(entryName, "nothing to backup") + } + } + + if created == 0 && !dryRun { + ui.Info("No agent targets to backup") + } + + return nil +} + +// restoreAgentBackup restores agent target directories from backup. +func restoreAgentBackup(targetName, fromTimestamp string, force, dryRun bool) error { + if targetName == "" { + return fmt.Errorf("usage: skillshare restore agents [--from ] [--force] [--dry-run]") + } + + cfg, err := config.Load() + if err != nil { + return err + } + + builtinAgents := config.DefaultAgentTargets() + agentPath := resolveAgentTargetPath(cfg.Targets[targetName], builtinAgents, targetName) + if agentPath == "" { + return fmt.Errorf("target '%s' has no agent path configured", targetName) + } + + entryName := targetName + "-agents" + ui.Header(fmt.Sprintf("Restoring agents for %s", targetName)) + + if dryRun { + ui.Warning("Dry run mode - no changes will be made") + ui.Info("Would restore %s to %s", entryName, agentPath) + return nil + } + + opts := backup.RestoreOptions{Force: force} + if fromTimestamp != "" { + return restoreFromTimestamp(entryName, agentPath, fromTimestamp, opts) + } + return restoreFromLatest(entryName, agentPath, opts) +} diff --git a/cmd/skillshare/collect.go b/cmd/skillshare/collect.go index 76afabcb..add9943a 100644 --- a/cmd/skillshare/collect.go +++ b/cmd/skillshare/collect.go @@ -309,7 +309,7 @@ func showCollectNextSteps(source string) { } func printCollectHelp() { - fmt.Println(`Usage: skillshare collect [target] [options] + fmt.Println(`Usage: skillshare collect [agents] [target] [options] Collect local skills from target(s) to source directory. @@ -328,5 +328,6 @@ Options: Examples: skillshare collect claude Collect from Claude target skillshare collect --all Collect from all targets - skillshare collect --dry-run Preview what would be collected`) + skillshare collect --dry-run Preview what would be collected + skillshare collect agents Collect from agents targets`) } diff --git a/cmd/skillshare/diff.go b/cmd/skillshare/diff.go index af031b31..c8c188c4 100644 --- a/cmd/skillshare/diff.go +++ b/cmd/skillshare/diff.go @@ -1050,7 +1050,7 @@ func pluralS(n int) string { } func printDiffHelp() { - fmt.Println(`Usage: skillshare diff [target] [options] + fmt.Println(`Usage: skillshare diff [agents|all] [target] [options] Show differences between source skills and target directories. Previews what 'sync' would change without modifying anything. @@ -1072,5 +1072,6 @@ Examples: skillshare diff claude # Diff a single target skillshare diff -p # Diff project-mode targets skillshare diff --stat # Show file-level stat - skillshare diff --patch # Show full text diff`) + skillshare diff --patch # Show full text diff + skillshare diff agents # Diff agents targets only`) } diff --git a/cmd/skillshare/list.go b/cmd/skillshare/list.go index 20f0fb2f..a38605f2 100644 --- a/cmd/skillshare/list.go +++ b/cmd/skillshare/list.go @@ -785,7 +785,7 @@ func abbreviateSource(source string) string { } func printListHelp() { - fmt.Println(`Usage: skillshare list [pattern] [options] + fmt.Println(`Usage: skillshare list [agents|all] [pattern] [options] List all installed skills in the source directory. An optional pattern filters skills by name, path, or source (case-insensitive). @@ -806,5 +806,7 @@ Examples: skillshare list --type local skillshare list react --type github --sort newest skillshare list --json | jq '.[].name' - skillshare list --verbose`) + skillshare list --verbose + skillshare list agents # List agents only + skillshare list all # List skills + agents`) } diff --git a/cmd/skillshare/status.go b/cmd/skillshare/status.go index 7016beed..e45b6ba9 100644 --- a/cmd/skillshare/status.go +++ b/cmd/skillshare/status.go @@ -531,7 +531,7 @@ func checkSkillVersion(cfg *config.Config) { } func printStatusHelp() { - fmt.Println(`Usage: skillshare status [options] + fmt.Println(`Usage: skillshare status [agents|all] [options] Show status of source, skills, and all targets. @@ -544,5 +544,7 @@ Options: Examples: skillshare status Show current state skillshare status --json Output as JSON - skillshare status -p Show project status`) + skillshare status -p Show project status + skillshare status agents Show agents status only + skillshare status all Show skills + agents status`) } diff --git a/cmd/skillshare/sync.go b/cmd/skillshare/sync.go index dd6762f3..f3f0cf1b 100644 --- a/cmd/skillshare/sync.go +++ b/cmd/skillshare/sync.go @@ -783,7 +783,7 @@ func syncSymlinkMode(name string, target config.TargetConfig, source string, dry } func printSyncHelp() { - fmt.Println(`Usage: skillshare sync [options] + fmt.Println(`Usage: skillshare sync [agents|all] [options] Sync skills from source to all configured targets. @@ -803,5 +803,7 @@ Examples: skillshare sync Sync skills to all targets skillshare sync --dry-run Preview sync changes skillshare sync --all Sync skills and extras - skillshare sync -p Sync project-level skills`) + skillshare sync -p Sync project-level skills + skillshare sync agents Sync agents only + skillshare sync all Sync skills and agents`) } diff --git a/cmd/skillshare/trash.go b/cmd/skillshare/trash.go index ccdab4e6..04dc2bc1 100644 --- a/cmd/skillshare/trash.go +++ b/cmd/skillshare/trash.go @@ -315,7 +315,7 @@ func logTrashOp(cfgPath string, action string, count int, name string, start tim } func printTrashHelp() { - fmt.Println(`Usage: skillshare trash [options] + fmt.Println(`Usage: skillshare trash [agents] [options] Manage uninstalled skills in the trash. @@ -337,5 +337,7 @@ Examples: skillshare trash restore my-skill # Restore from trash skillshare trash restore my-skill -p # Restore in project mode skillshare trash delete my-skill # Permanently delete from trash - skillshare trash empty # Empty the trash`) + skillshare trash empty # Empty the trash + skillshare trash agents list # List trashed agents + skillshare trash agents restore tutor # Restore an agent from trash`) } diff --git a/cmd/skillshare/uninstall.go b/cmd/skillshare/uninstall.go index a61da93b..1a8dc569 100644 --- a/cmd/skillshare/uninstall.go +++ b/cmd/skillshare/uninstall.go @@ -1163,6 +1163,7 @@ func logUninstallOp(cfgPath string, names []string, succeeded int, start time.Ti func printUninstallHelp() { fmt.Println(`Usage: skillshare uninstall ... [options] + skillshare uninstall [agents] [options] skillshare uninstall --group [options] skillshare uninstall --all [options] @@ -1198,5 +1199,7 @@ Examples: skillshare uninstall --group frontend -n # Preview group removal skillshare uninstall x -G backend --force # Mix names and groups skillshare uninstall _team-repo # Remove tracked repository - skillshare uninstall team-repo # _ prefix is optional`) + skillshare uninstall team-repo # _ prefix is optional + skillshare uninstall agents tutor # Uninstall an agent + skillshare uninstall agents --all # Uninstall all agents`) } diff --git a/cmd/skillshare/update.go b/cmd/skillshare/update.go index 3617e516..9ce8e1f2 100644 --- a/cmd/skillshare/update.go +++ b/cmd/skillshare/update.go @@ -493,6 +493,7 @@ func logUpdateOp(cfgPath string, names []string, opts *updateOptions, mode strin func printUpdateHelp() { fmt.Println(`Usage: skillshare update ... [options] + skillshare update [agents] [options] skillshare update --group [options] skillshare update --all [options] @@ -540,5 +541,9 @@ Examples: skillshare update --all -T high # Use HIGH threshold for this run skillshare update --all --dry-run # Preview updates skillshare update _team --force # Discard changes and update - skillshare update --all --prune # Update all + remove stale skills`) + skillshare update --all --prune # Update all + remove stale skills + skillshare update agents --all # Update all agents + skillshare update agents tutor # Update a single agent + +Note: --group is not supported for agents.`) } diff --git a/tests/integration/agent_backup_test.go b/tests/integration/agent_backup_test.go new file mode 100644 index 00000000..4f19e749 --- /dev/null +++ b/tests/integration/agent_backup_test.go @@ -0,0 +1,135 @@ +//go:build !online + +package integration + +import ( + "os" + "path/filepath" + "testing" + + "skillshare/internal/testutil" +) + +func TestBackup_Agents_CreatesBackup(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + createAgentSource(t, sb, map[string]string{ + "tutor.md": "# Tutor agent", + }) + claudeAgents := createAgentTarget(t, sb, "claude") + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + skills: + path: ` + sb.CreateTarget("claude") + ` + agents: + path: ` + claudeAgents + ` +`) + + // Sync agents first so there's something to backup + sb.RunCLI("sync", "agents") + + result := sb.RunCLI("backup", "agents") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "agent backup") +} + +func TestBackup_Agents_DryRun(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + createAgentSource(t, sb, map[string]string{ + "tutor.md": "# Tutor agent", + }) + claudeAgents := createAgentTarget(t, sb, "claude") + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + skills: + path: ` + sb.CreateTarget("claude") + ` + agents: + path: ` + claudeAgents + ` +`) + + sb.RunCLI("sync", "agents") + + result := sb.RunCLI("backup", "agents", "--dry-run") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "Dry run") +} + +func TestBackup_Agents_RestoreRoundTrip(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + createAgentSource(t, sb, map[string]string{ + "tutor.md": "# Tutor agent", + }) + claudeAgents := createAgentTarget(t, sb, "claude") + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + skills: + path: ` + sb.CreateTarget("claude") + ` + agents: + path: ` + claudeAgents + ` +`) + + // Sync then backup + sb.RunCLI("sync", "agents") + sb.RunCLI("backup", "agents") + + // Verify symlink exists + linkPath := filepath.Join(claudeAgents, "tutor.md") + if _, err := os.Lstat(linkPath); err != nil { + t.Fatalf("expected agent symlink at %s", linkPath) + } + + // Delete the agent from target + os.Remove(linkPath) + if _, err := os.Lstat(linkPath); !os.IsNotExist(err) { + t.Fatal("symlink should be removed") + } + + // Restore + result := sb.RunCLI("restore", "agents", "claude", "--force") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "Restored") +} + +func TestBackup_Default_DoesNotBackupAgents(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + sb.CreateSkill("my-skill", map[string]string{ + "SKILL.md": "---\nname: my-skill\n---\n# Content", + }) + claudeSkills := sb.CreateTarget("claude") + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + skills: + path: ` + claudeSkills + ` +`) + + // Default backup should only backup skills, not mention agents + result := sb.RunCLI("backup") + result.AssertSuccess(t) + result.AssertOutputNotContains(t, "agent") +} + +func TestRestore_Agents_ProjectModeRejected(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + sb.WriteConfig(`source: ` + sb.SourcePath + "\ntargets: {}\n") + + result := sb.RunCLI("restore", "-p", "agents", "claude") + result.AssertFailure(t) + result.AssertAnyOutputContains(t, "not supported in project mode") +} From eceb9cc6e491be2233e22e4af52da930d7c7e20d Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 6 Apr 2026 11:04:01 +0800 Subject: [PATCH 048/205] =?UTF-8?q?test(agents):=20fill=20coverage=20gaps?= =?UTF-8?q?=20=E2=80=94=20project=20mode=20uninstall,=20trash=20empty/dele?= =?UTF-8?q?te,=20multi-target,=20JSON=20kind?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 7 new integration tests covering: - trash agents empty / delete - uninstall agents in project mode (fix: wire kind into project dispatch) - check all (combined skill + agent) - sync agents skips targets without agent path - list all --json verifies kind field on every entry - status agents --json with target details Total agent tests: 45 (38 + 7). --- cmd/skillshare/uninstall.go | 10 + tests/integration/agent_coverage_gaps_test.go | 196 ++++++++++++++++++ 2 files changed, 206 insertions(+) create mode 100644 tests/integration/agent_coverage_gaps_test.go diff --git a/cmd/skillshare/uninstall.go b/cmd/skillshare/uninstall.go index 1a8dc569..2ba18b9f 100644 --- a/cmd/skillshare/uninstall.go +++ b/cmd/skillshare/uninstall.go @@ -554,6 +554,16 @@ func cmdUninstall(args []string) error { kind, rest := parseKindArg(rest) if mode == modeProject { + if kind == kindAgents { + agentsDir := filepath.Join(cwd, ".skillshare", "agents") + opts, _, _ := parseUninstallArgs(rest) + if opts == nil { + opts = &uninstallOptions{skillNames: rest} + } + opts.force = opts.force || opts.jsonOutput + err := cmdUninstallAgents(agentsDir, opts, config.ProjectConfigPath(cwd), start) + return err + } err := cmdUninstallProject(rest, cwd) logUninstallOp(config.ProjectConfigPath(cwd), uninstallOpNames(rest), 0, start, err) return err diff --git a/tests/integration/agent_coverage_gaps_test.go b/tests/integration/agent_coverage_gaps_test.go new file mode 100644 index 00000000..c334ad49 --- /dev/null +++ b/tests/integration/agent_coverage_gaps_test.go @@ -0,0 +1,196 @@ +//go:build !online + +package integration + +import ( + "os" + "path/filepath" + "testing" + + "skillshare/internal/testutil" +) + +// --- trash agents empty --- + +func TestTrash_Agents_Empty(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + createAgentSource(t, sb, map[string]string{ + "tutor.md": "# Tutor agent", + "reviewer.md": "# Reviewer agent", + }) + sb.WriteConfig(`source: ` + sb.SourcePath + "\ntargets: {}\n") + + // Uninstall both agents to trash + sb.RunCLI("uninstall", "-g", "agents", "--all", "--force") + + // Verify trash has items + listResult := sb.RunCLI("trash", "agents", "list", "--no-tui") + listResult.AssertSuccess(t) + listResult.AssertAnyOutputContains(t, "tutor") + + // Empty agent trash (use --force via input "y") + emptyResult := sb.RunCLIWithInput("y\n", "trash", "agents", "empty") + emptyResult.AssertSuccess(t) + emptyResult.AssertAnyOutputContains(t, "Emptied trash") + + // Verify trash is now empty + afterResult := sb.RunCLI("trash", "agents", "list", "--no-tui") + afterResult.AssertSuccess(t) + afterResult.AssertAnyOutputContains(t, "empty") +} + +// --- trash agents delete --- + +func TestTrash_Agents_Delete(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + createAgentSource(t, sb, map[string]string{ + "tutor.md": "# Tutor agent", + }) + sb.WriteConfig(`source: ` + sb.SourcePath + "\ntargets: {}\n") + + sb.RunCLI("uninstall", "-g", "agents", "tutor", "--force") + + // Delete specific item from agent trash + result := sb.RunCLI("trash", "agents", "delete", "tutor") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "Permanently deleted") +} + +// --- uninstall agents project mode --- + +func TestUninstall_Agents_ProjectMode(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + // Setup project + projectDir := filepath.Join(sb.Root, "myproject") + os.MkdirAll(filepath.Join(projectDir, ".skillshare", "skills"), 0755) + agentsDir := filepath.Join(projectDir, ".skillshare", "agents") + os.MkdirAll(agentsDir, 0755) + os.WriteFile(filepath.Join(agentsDir, "tutor.md"), []byte("# Tutor agent"), 0644) + + // Write project config + projectCfgDir := filepath.Join(projectDir, ".skillshare") + os.WriteFile(filepath.Join(projectCfgDir, "config.yaml"), []byte("targets:\n - claude\n"), 0644) + + // Also need global config for the CLI to not error + sb.WriteConfig(`source: ` + sb.SourcePath + "\ntargets: {}\n") + + result := sb.RunCLIInDir(projectDir, "uninstall", "-p", "agents", "tutor", "--force") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "Removed agent") + + // Verify removed + if _, err := os.Stat(filepath.Join(agentsDir, "tutor.md")); !os.IsNotExist(err) { + t.Error("agent should be removed from project agents dir") + } +} + +// --- check all (combined) --- + +func TestCheck_All_CombinedOutput(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + sb.CreateSkill("my-skill", map[string]string{ + "SKILL.md": "---\nname: my-skill\n---\n# Content", + }) + createAgentSource(t, sb, map[string]string{ + "tutor.md": "# Tutor agent", + }) + sb.WriteConfig(`source: ` + sb.SourcePath + "\ntargets: {}\n") + + // "check all" should show both skills and agents + // Currently "check" defaults to skills-only, "check agents" is agents-only + // "check all" should combine both + result := sb.RunCLI("check", "all") + result.AssertSuccess(t) +} + +// --- multi-target agent config --- + +func TestSync_Agents_SkipsTargetsWithoutAgentPath(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + createAgentSource(t, sb, map[string]string{ + "tutor.md": "# Tutor agent", + }) + claudeAgents := createAgentTarget(t, sb, "claude") + + // claude has agent path, cursor does NOT + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + skills: + path: ` + sb.CreateTarget("claude") + ` + agents: + path: ` + claudeAgents + ` + cursor: + skills: + path: ` + sb.CreateTarget("cursor") + ` +`) + + result := sb.RunCLI("sync", "agents") + result.AssertSuccess(t) + + // Claude agents should be synced + if _, err := os.Lstat(filepath.Join(claudeAgents, "tutor.md")); err != nil { + t.Error("claude agent should be synced") + } +} + +// --- list agents JSON with kind field --- + +func TestList_Agents_JSON_AllEntriesHaveKind(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + sb.CreateSkill("my-skill", map[string]string{ + "SKILL.md": "---\nname: my-skill\n---\n# Content", + }) + createAgentSource(t, sb, map[string]string{ + "tutor.md": "# Tutor agent", + }) + sb.WriteConfig(`source: ` + sb.SourcePath + "\ntargets: {}\n") + + // "list all --json" should have kind on every entry + result := sb.RunCLI("list", "all", "--json") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, `"kind": "skill"`) + result.AssertAnyOutputContains(t, `"kind": "agent"`) +} + +// --- status agents JSON with targets --- + +func TestStatus_Agents_JSON_WithTargets(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + createAgentSource(t, sb, map[string]string{ + "tutor.md": "# Tutor agent", + }) + claudeAgents := createAgentTarget(t, sb, "claude") + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + skills: + path: ` + sb.CreateTarget("claude") + ` + agents: + path: ` + claudeAgents + ` +`) + + // Sync agents + sb.RunCLI("sync", "agents") + + result := sb.RunCLI("status", "agents", "--json") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, `"agents"`) + result.AssertAnyOutputContains(t, `"expected"`) + result.AssertAnyOutputContains(t, `"linked"`) +} From 586f0f577375ce2bba108304830a8ca0bf314cf4 Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 6 Apr 2026 11:11:30 +0800 Subject: [PATCH 049/205] fix(agents): correct project-mode agent dispatch for all commands Fixes 3 high-severity bugs found by Codex adversarial review: 1. update -p agents: was writing to global agents instead of project. Fix: check project mode BEFORE agent branch, add cmdUpdateAgentsProject. 2. sync -p agents: was silently running skill sync. Fix: add syncAgentsProject with project config/target resolution. sync -p all: now runs skill sync then project agent sync. 3. uninstall -p agents: was searching .skillshare/skills instead of agents. Fix: already addressed in 22c6c61e, verified still correct. Additional hardening for commands without project agent support: - collect -p agents: returns explicit "not yet supported" error - status -p agents: returns explicit error - check -p agents: returns explicit error - diff -p agents: returns explicit error - audit -p agents: returns explicit error Principle: never silently fall back to skills when agents is requested. --- cmd/skillshare/audit.go | 3 + cmd/skillshare/check.go | 3 + cmd/skillshare/collect.go | 3 + cmd/skillshare/diff_project.go | 3 + cmd/skillshare/status.go | 4 ++ cmd/skillshare/sync.go | 14 +++++ cmd/skillshare/sync_agents.go | 108 ++++++++++++++++++++++++++++++++ cmd/skillshare/update.go | 5 +- cmd/skillshare/update_agents.go | 88 ++++++++++++++++++++++++++ 9 files changed, 230 insertions(+), 1 deletion(-) diff --git a/cmd/skillshare/audit.go b/cmd/skillshare/audit.go index 4cf20e4e..e421785d 100644 --- a/cmd/skillshare/audit.go +++ b/cmd/skillshare/audit.go @@ -183,6 +183,9 @@ func cmdAudit(args []string) error { cfgPath = config.ConfigPath() } } else if mode == modeProject { + if kind == kindAgents { + return fmt.Errorf("audit agents is not yet supported in project mode") + } rt, err := loadProjectRuntime(cwd) if err != nil { return err diff --git a/cmd/skillshare/check.go b/cmd/skillshare/check.go index bc7f1b24..14497cfa 100644 --- a/cmd/skillshare/check.go +++ b/cmd/skillshare/check.go @@ -191,6 +191,9 @@ func cmdCheck(args []string) error { cfgPath := config.ConfigPath() if mode == modeProject { cfgPath = config.ProjectConfigPath(cwd) + if kind == kindAgents { + return fmt.Errorf("check agents is not yet supported in project mode") + } cmdErr := cmdCheckProject(cwd, opts) logCheckOp(cfgPath, 0, 0, 0, 0, scope, start, cmdErr) return cmdErr diff --git a/cmd/skillshare/collect.go b/cmd/skillshare/collect.go index add9943a..bc917238 100644 --- a/cmd/skillshare/collect.go +++ b/cmd/skillshare/collect.go @@ -86,6 +86,9 @@ func cmdCollect(args []string) error { kind, rest := parseKindArg(rest) if mode == modeProject { + if kind == kindAgents { + return fmt.Errorf("collect agents is not yet supported in project mode") + } err := cmdCollectProject(rest, cwd) logCollectOp(config.ProjectConfigPath(cwd), start, err) return err diff --git a/cmd/skillshare/diff_project.go b/cmd/skillshare/diff_project.go index dd95ef4f..7ec3f111 100644 --- a/cmd/skillshare/diff_project.go +++ b/cmd/skillshare/diff_project.go @@ -11,6 +11,9 @@ import ( ) func cmdDiffProject(root, targetName string, kind resourceKindFilter, opts diffRenderOpts, start time.Time) error { + if kind == kindAgents { + return fmt.Errorf("diff agents is not yet supported in project mode") + } if !projectConfigExists(root) { if err := performProjectInit(root, projectInitOptions{}); err != nil { return err diff --git a/cmd/skillshare/status.go b/cmd/skillshare/status.go index e45b6ba9..223a8e29 100644 --- a/cmd/skillshare/status.go +++ b/cmd/skillshare/status.go @@ -103,6 +103,10 @@ func cmdStatus(args []string) error { return fmt.Errorf("unexpected arguments: %v", rest) } } + if kind == kindAgents { + // Agent-only project status is not yet implemented + return fmt.Errorf("status agents is not yet supported in project mode") + } if jsonOutput { return cmdStatusProjectJSON(cwd) } diff --git a/cmd/skillshare/sync.go b/cmd/skillshare/sync.go index f3f0cf1b..8daf1594 100644 --- a/cmd/skillshare/sync.go +++ b/cmd/skillshare/sync.go @@ -114,6 +114,11 @@ func cmdSync(args []string) error { } if mode == modeProject { + // Agent-only project sync + if kind == kindAgents { + return syncAgentsProject(cwd, dryRun, force, jsonOutput, start) + } + if hasAll && !jsonOutput { // Run project extras sync after project skills sync (text mode) defer func() { @@ -123,9 +128,18 @@ func cmdSync(args []string) error { } }() } + stats, results, projIgnoreStats, err := cmdSyncProject(cwd, dryRun, force, jsonOutput) stats.ProjectScope = true logSyncOp(config.ProjectConfigPath(cwd), stats, start, err) + + // Append agent sync when kind=all + if kind == kindAll { + if agentErr := syncAgentsProject(cwd, dryRun, force, jsonOutput, start); agentErr != nil && err == nil { + err = agentErr + } + } + if jsonOutput { if hasAll { projCfg, loadErr := config.LoadProject(cwd) diff --git a/cmd/skillshare/sync_agents.go b/cmd/skillshare/sync_agents.go index a5efddd4..6be20883 100644 --- a/cmd/skillshare/sync_agents.go +++ b/cmd/skillshare/sync_agents.go @@ -3,6 +3,7 @@ package main import ( "fmt" "os" + "path/filepath" "time" "skillshare/internal/config" @@ -126,6 +127,113 @@ func resolveAgentTargetPath(tc config.TargetConfig, builtinAgents map[string]con return "" } +// syncAgentsProject syncs agents for project mode using .skillshare/agents/ as source +// and project-level target agent paths. +func syncAgentsProject(projectRoot string, dryRun, force, jsonOutput bool, start time.Time) error { + agentsSource := filepath.Join(projectRoot, ".skillshare", "agents") + + if _, err := os.Stat(agentsSource); err != nil { + if os.IsNotExist(err) { + if !jsonOutput { + ui.Info("No project agents directory (%s)", agentsSource) + } + return nil + } + return fmt.Errorf("cannot access project agents: %w", err) + } + + agents, err := resource.AgentKind{}.Discover(agentsSource) + if err != nil { + return fmt.Errorf("cannot discover project agents: %w", err) + } + + if len(agents) == 0 { + if !jsonOutput { + ui.Info("No project agents found") + } + return nil + } + + if !jsonOutput { + ui.Header("Syncing project agents") + if dryRun { + ui.Warning("Dry run mode - no changes will be made") + } + } + + builtinAgents := config.ProjectAgentTargets() + var totals agentSyncStats + var syncErr error + + // Load project config for target list + projCfg, loadErr := config.LoadProject(projectRoot) + if loadErr != nil { + return fmt.Errorf("cannot load project config: %w", loadErr) + } + + for _, entry := range projCfg.Targets { + var agentPath string + ac := entry.AgentsConfig() + if ac.Path != "" { + agentPath = ac.Path + if !filepath.IsAbs(agentPath) { + agentPath = filepath.Join(projectRoot, agentPath) + } + } else if builtin, ok := builtinAgents[entry.Name]; ok { + agentPath = config.ExpandPath(builtin.Path) + } + if agentPath == "" { + continue + } + + mode := ac.Mode + if mode == "" { + mode = "merge" + } + + result, syncResultErr := sync.SyncAgents(agents, agentsSource, agentPath, mode, dryRun, force) + if syncResultErr != nil { + if !jsonOutput { + ui.Error("%s: agent sync failed: %v", entry.Name, syncResultErr) + } + syncErr = fmt.Errorf("some agent targets failed to sync") + continue + } + + var pruned []string + switch mode { + case "copy": + pruned, _ = sync.PruneOrphanAgentCopies(agentPath, agents, dryRun) + case "merge": + pruned, _ = sync.PruneOrphanAgentLinks(agentPath, agents, dryRun) + } + + stats := agentSyncStats{ + linked: len(result.Linked), + skipped: len(result.Skipped), + updated: len(result.Updated), + pruned: len(pruned), + } + totals.linked += stats.linked + totals.skipped += stats.skipped + totals.updated += stats.updated + totals.pruned += stats.pruned + + if !jsonOutput { + reportAgentSyncResult(entry.Name, mode, stats, dryRun) + } + } + + if !jsonOutput { + fmt.Println() + ui.Info("Project agent sync: %d linked, %d local, %d updated, %d pruned (%s)", + totals.linked, totals.skipped, totals.updated, totals.pruned, + formatDuration(start)) + } + + return syncErr +} + // reportAgentSyncResult prints per-target agent sync status. func reportAgentSyncResult(name, mode string, stats agentSyncStats, dryRun bool) { if stats.linked > 0 || stats.updated > 0 || stats.pruned > 0 { diff --git a/cmd/skillshare/update.go b/cmd/skillshare/update.go index 9ce8e1f2..704b2beb 100644 --- a/cmd/skillshare/update.go +++ b/cmd/skillshare/update.go @@ -133,8 +133,11 @@ func cmdUpdate(args []string) error { // Extract kind filter (e.g. "skillshare update agents") kind, rest := parseKindArg(rest) - // Agent-only update: skip skill update entirely + // Agent-only update: dispatch to correct scope if kind == kindAgents { + if mode == modeProject { + return cmdUpdateAgentsProject(rest, cwd, start) + } cfg, loadErr := config.Load() if loadErr != nil { return loadErr diff --git a/cmd/skillshare/update_agents.go b/cmd/skillshare/update_agents.go index 61cb3236..195d6370 100644 --- a/cmd/skillshare/update_agents.go +++ b/cmd/skillshare/update_agents.go @@ -272,3 +272,91 @@ func updateAgentsOutputJSON(updatable []check.AgentCheckResult, dryRun bool, sta } return writeJSONResult(&output, err) } + +// cmdUpdateAgentsProject handles "skillshare update -p agents [name|--all]". +func cmdUpdateAgentsProject(args []string, projectRoot string, start time.Time) error { + agentsDir := filepath.Join(projectRoot, ".skillshare", "agents") + if _, err := os.Stat(agentsDir); err != nil { + if os.IsNotExist(err) { + ui.Info("No project agents directory (%s)", agentsDir) + return nil + } + return fmt.Errorf("cannot access project agents: %w", err) + } + + opts, showHelp, parseErr := parseUpdateAgentArgs(args) + if showHelp { + printUpdateHelp() + return nil + } + if parseErr != nil { + return parseErr + } + + results := check.CheckAgents(agentsDir) + if len(results) == 0 { + ui.Info("No project agents found") + return nil + } + + if len(opts.names) > 0 { + results = filterAgentCheckResults(results, opts.names) + if len(results) == 0 { + return fmt.Errorf("no matching agents found: %s", strings.Join(opts.names, ", ")) + } + } + + var tracked []check.AgentCheckResult + for _, r := range results { + if r.Source != "" { + tracked = append(tracked, r) + } + } + + if len(tracked) == 0 { + ui.Info("No tracked project agents to update (all are local)") + return nil + } + + sp := ui.StartSpinner(fmt.Sprintf("Checking %d agent(s)...", len(tracked))) + check.EnrichAgentResultsWithRemote(tracked, func() { sp.Success("Check complete") }) + + var updatable []check.AgentCheckResult + for _, r := range tracked { + if r.Status == "update_available" { + updatable = append(updatable, r) + } + } + + if len(updatable) == 0 { + ui.Success("All project agents are up to date") + return nil + } + + ui.Header("Updating project agents") + if opts.dryRun { + ui.Warning("Dry run mode") + for _, r := range updatable { + ui.Info(" %s: update available from %s", r.Name, r.Source) + } + return nil + } + + var updated, failed int + for _, r := range updatable { + if err := reinstallAgent(agentsDir, r); err != nil { + ui.Error(" %s: %v", r.Name, err) + failed++ + } else { + ui.Success(" %s: updated", r.Name) + updated++ + } + } + + logUpdateAgentOp(config.ProjectConfigPath(projectRoot), len(updatable), updated, failed, opts.dryRun, start) + + if failed > 0 { + return fmt.Errorf("%d agent(s) failed to update", failed) + } + return nil +} From 075c0191b7aee977ca67f6d5821bd31c2cce7a45 Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 6 Apr 2026 11:21:37 +0800 Subject: [PATCH 050/205] feat(agents): implement project-mode agent support for all commands MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Remove all "not yet supported in project mode" errors. Every command that supports agents in global mode now also supports project mode: - status -p agents: shows project agent source + per-target status - status -p all: shows both skills and agents - check -p agents: checks agent status from .skillshare/agents/ - diff -p agents: computes agent diffs (missing/orphan per target) - diff agents (global): also now fully implemented via diffGlobalAgents - collect -p agents: collects local agents from project targets - audit -p agents: scans .skillshare/agents/ for security patterns New file: diff_agents.go — shared agent diff logic for both modes (computeAgentDiff, diffProjectAgents, diffGlobalAgents). 11 new integration tests covering all 5 commands in project mode. Total agent tests: 56. --- cmd/skillshare/audit.go | 9 +- cmd/skillshare/check.go | 30 ++- cmd/skillshare/collect.go | 2 +- cmd/skillshare/collect_agents.go | 48 +++++ cmd/skillshare/diff.go | 5 + cmd/skillshare/diff_agents.go | 160 ++++++++++++++ cmd/skillshare/diff_project.go | 2 +- cmd/skillshare/status.go | 8 +- cmd/skillshare/status_project.go | 211 +++++++++++++------ tests/integration/agent_project_mode_test.go | 206 ++++++++++++++++++ 10 files changed, 607 insertions(+), 74 deletions(-) create mode 100644 cmd/skillshare/diff_agents.go create mode 100644 tests/integration/agent_project_mode_test.go diff --git a/cmd/skillshare/audit.go b/cmd/skillshare/audit.go index e421785d..ea4fd1bb 100644 --- a/cmd/skillshare/audit.go +++ b/cmd/skillshare/audit.go @@ -183,14 +183,15 @@ func cmdAudit(args []string) error { cfgPath = config.ConfigPath() } } else if mode == modeProject { - if kind == kindAgents { - return fmt.Errorf("audit agents is not yet supported in project mode") - } rt, err := loadProjectRuntime(cwd) if err != nil { return err } - sourcePath = rt.sourcePath + if kind == kindAgents { + sourcePath = rt.agentsSourcePath + } else { + sourcePath = rt.sourcePath + } projectRoot = cwd defaultThreshold = rt.config.Audit.BlockThreshold configProfile = rt.config.Audit.Profile diff --git a/cmd/skillshare/check.go b/cmd/skillshare/check.go index 14497cfa..0d510202 100644 --- a/cmd/skillshare/check.go +++ b/cmd/skillshare/check.go @@ -192,7 +192,35 @@ func cmdCheck(args []string) error { if mode == modeProject { cfgPath = config.ProjectConfigPath(cwd) if kind == kindAgents { - return fmt.Errorf("check agents is not yet supported in project mode") + agentsDir := filepath.Join(cwd, ".skillshare", "agents") + agentResults := check.CheckAgents(agentsDir) + if opts.json { + out, _ := json.MarshalIndent(agentResults, "", " ") + fmt.Println(string(out)) + } else { + ui.Header(ui.WithModeLabel("Checking agents")) + ui.StepStart("Agents source", agentsDir) + if len(agentResults) == 0 { + ui.Info("No agents found") + } else { + fmt.Println() + for _, r := range agentResults { + switch r.Status { + case "up_to_date": + ui.ListItem("success", r.Name, "up to date") + case "drifted": + ui.ListItem("warning", r.Name, r.Message) + case "local": + ui.ListItem("info", r.Name, "local agent") + case "error": + ui.ListItem("error", r.Name, r.Message) + } + } + } + fmt.Println() + } + logCheckOp(cfgPath, 0, len(agentResults), 0, 0, scope, start, nil) + return nil } cmdErr := cmdCheckProject(cwd, opts) logCheckOp(cfgPath, 0, 0, 0, 0, scope, start, cmdErr) diff --git a/cmd/skillshare/collect.go b/cmd/skillshare/collect.go index bc917238..f0dc9de8 100644 --- a/cmd/skillshare/collect.go +++ b/cmd/skillshare/collect.go @@ -87,7 +87,7 @@ func cmdCollect(args []string) error { if mode == modeProject { if kind == kindAgents { - return fmt.Errorf("collect agents is not yet supported in project mode") + return cmdCollectProjectAgents(cwd) } err := cmdCollectProject(rest, cwd) logCollectOp(config.ProjectConfigPath(cwd), start, err) diff --git a/cmd/skillshare/collect_agents.go b/cmd/skillshare/collect_agents.go index 9c6fe1fc..cbbb3ffd 100644 --- a/cmd/skillshare/collect_agents.go +++ b/cmd/skillshare/collect_agents.go @@ -3,6 +3,7 @@ package main import ( "fmt" "os" + "path/filepath" "time" "skillshare/internal/config" @@ -63,3 +64,50 @@ func cmdCollectAgents(cfg *config.Config, dryRun, jsonOutput bool, start time.Ti return nil } + +// cmdCollectProjectAgents collects non-symlinked agent .md files from project targets. +func cmdCollectProjectAgents(projectRoot string) error { + agentsSource := filepath.Join(projectRoot, ".skillshare", "agents") + if err := os.MkdirAll(agentsSource, 0755); err != nil { + return fmt.Errorf("cannot create project agents directory: %w", err) + } + + projCfg, err := config.LoadProject(projectRoot) + if err != nil { + return fmt.Errorf("cannot load project config: %w", err) + } + + builtinAgents := config.ProjectAgentTargets() + var allCollected []string + + ui.Header(ui.WithModeLabel("Collect agents")) + + for _, entry := range projCfg.Targets { + agentPath := resolveProjectAgentTargetPath(entry, builtinAgents, projectRoot) + if agentPath == "" { + continue + } + if _, statErr := os.Stat(agentPath); statErr != nil { + continue + } + + collected, collectErr := sync.CollectAgents(agentPath, agentsSource, false, os.Stdout) + if collectErr != nil { + ui.Warning("%s: collect failed: %v", entry.Name, collectErr) + continue + } + if len(collected) > 0 { + allCollected = append(allCollected, collected...) + ui.Success("%s: collected %d agent(s)", entry.Name, len(collected)) + } + } + + if len(allCollected) == 0 { + ui.Info("No local agents found to collect") + } else { + fmt.Println() + ui.Info("Collected %d agent(s) to %s", len(allCollected), agentsSource) + } + + return nil +} diff --git a/cmd/skillshare/diff.go b/cmd/skillshare/diff.go index c8c188c4..86b1677d 100644 --- a/cmd/skillshare/diff.go +++ b/cmd/skillshare/diff.go @@ -368,6 +368,11 @@ func cmdDiffGlobal(targetName string, kind resourceKindFilter, opts diffRenderOp return err } + // Agent-only diff + if kind == kindAgents { + return diffGlobalAgents(cfg, targetName, opts, start) + } + var spinner *ui.Spinner if !opts.jsonOutput { spinner = ui.StartSpinner("Discovering skills") diff --git a/cmd/skillshare/diff_agents.go b/cmd/skillshare/diff_agents.go new file mode 100644 index 00000000..19c8d654 --- /dev/null +++ b/cmd/skillshare/diff_agents.go @@ -0,0 +1,160 @@ +package main + +import ( + "os" + "path/filepath" + "strings" + "time" + + "skillshare/internal/config" + "skillshare/internal/resource" + "skillshare/internal/ui" +) + +// diffProjectAgents computes agent diffs for project mode. +func diffProjectAgents(root, targetName string, opts diffRenderOpts, start time.Time) error { + if !projectConfigExists(root) { + if err := performProjectInit(root, projectInitOptions{}); err != nil { + return err + } + } + + rt, err := loadProjectRuntime(root) + if err != nil { + return err + } + + agentsSource := rt.agentsSourcePath + agents, _ := resource.AgentKind{}.Discover(agentsSource) + + builtinAgents := config.ProjectAgentTargets() + var results []targetDiffResult + + for _, entry := range rt.config.Targets { + if targetName != "" && entry.Name != targetName { + continue + } + agentPath := resolveProjectAgentTargetPath(entry, builtinAgents, root) + if agentPath == "" { + continue + } + + r := computeAgentDiff(entry.Name, agentPath, agents) + results = append(results, r) + } + + if opts.jsonOutput { + return diffOutputJSON(results, start) + } + + if len(results) == 0 { + ui.Info("No agent-capable targets found") + return nil + } + + renderGroupedDiffs(results, opts) + return nil +} + +// diffGlobalAgents computes agent diffs for global mode. +func diffGlobalAgents(cfg *config.Config, targetName string, opts diffRenderOpts, start time.Time) error { + agentsSource := cfg.EffectiveAgentsSource() + agents, _ := resource.AgentKind{}.Discover(agentsSource) + + builtinAgents := config.DefaultAgentTargets() + var results []targetDiffResult + + for name := range cfg.Targets { + if targetName != "" && name != targetName { + continue + } + agentPath := resolveAgentTargetPath(cfg.Targets[name], builtinAgents, name) + if agentPath == "" { + continue + } + + r := computeAgentDiff(name, agentPath, agents) + results = append(results, r) + } + + if opts.jsonOutput { + return diffOutputJSON(results, start) + } + + if len(results) == 0 { + ui.Info("No agent-capable targets found") + return nil + } + + renderGroupedDiffs(results, opts) + return nil +} + +// computeAgentDiff compares source agents against a target directory. +func computeAgentDiff(targetName, targetDir string, agents []resource.DiscoveredResource) targetDiffResult { + r := targetDiffResult{ + name: targetName, + mode: "merge", + synced: true, + } + + // Build map of expected agents + expected := make(map[string]resource.DiscoveredResource, len(agents)) + for _, a := range agents { + expected[a.FlatName] = a + } + + // Check what exists in target + existing := make(map[string]bool) + if entries, err := os.ReadDir(targetDir); err == nil { + for _, e := range entries { + if e.IsDir() || !strings.HasSuffix(strings.ToLower(e.Name()), ".md") { + continue + } + existing[e.Name()] = true + } + } + + // Missing in target (need sync) + for flatName := range expected { + if !existing[flatName] { + r.items = append(r.items, copyDiffEntry{ + action: "add", + name: flatName, + kind: "agent", + reason: "not in target", + isSync: true, + }) + r.synced = false + r.syncCount++ + } + } + + // Extra in target (orphans) + for name := range existing { + if _, ok := expected[name]; !ok { + fullPath := filepath.Join(targetDir, name) + fi, _ := os.Lstat(fullPath) + if fi != nil && fi.Mode()&os.ModeSymlink != 0 { + r.items = append(r.items, copyDiffEntry{ + action: "remove", + name: name, + kind: "agent", + reason: "orphan symlink", + isSync: true, + }) + r.synced = false + } else { + r.items = append(r.items, copyDiffEntry{ + action: "local", + name: name, + kind: "agent", + reason: "local file", + }) + r.localCount++ + } + } + } + + return r +} diff --git a/cmd/skillshare/diff_project.go b/cmd/skillshare/diff_project.go index 7ec3f111..84e5d4e2 100644 --- a/cmd/skillshare/diff_project.go +++ b/cmd/skillshare/diff_project.go @@ -12,7 +12,7 @@ import ( func cmdDiffProject(root, targetName string, kind resourceKindFilter, opts diffRenderOpts, start time.Time) error { if kind == kindAgents { - return fmt.Errorf("diff agents is not yet supported in project mode") + return diffProjectAgents(root, targetName, opts, start) } if !projectConfigExists(root) { if err := performProjectInit(root, projectInitOptions{}); err != nil { diff --git a/cmd/skillshare/status.go b/cmd/skillshare/status.go index 223a8e29..a00d8bed 100644 --- a/cmd/skillshare/status.go +++ b/cmd/skillshare/status.go @@ -103,14 +103,10 @@ func cmdStatus(args []string) error { return fmt.Errorf("unexpected arguments: %v", rest) } } - if kind == kindAgents { - // Agent-only project status is not yet implemented - return fmt.Errorf("status agents is not yet supported in project mode") - } if jsonOutput { - return cmdStatusProjectJSON(cwd) + return cmdStatusProjectJSON(cwd, kind) } - return cmdStatusProject(cwd) + return cmdStatusProject(cwd, kind) } cfg, err := config.Load() diff --git a/cmd/skillshare/status_project.go b/cmd/skillshare/status_project.go index 3759a241..c91c07dc 100644 --- a/cmd/skillshare/status_project.go +++ b/cmd/skillshare/status_project.go @@ -9,12 +9,13 @@ import ( "skillshare/internal/audit" "skillshare/internal/config" "skillshare/internal/git" + "skillshare/internal/resource" "skillshare/internal/skillignore" "skillshare/internal/sync" "skillshare/internal/ui" ) -func cmdStatusProject(root string) error { +func cmdStatusProject(root string, kind resourceKindFilter) error { if !projectConfigExists(root) { if err := performProjectInit(root, projectInitOptions{}); err != nil { return err @@ -26,34 +27,40 @@ func cmdStatusProject(root string) error { return err } - sp := ui.StartSpinner("Discovering skills...") - discovered, stats, discoverErr := sync.DiscoverSourceSkillsWithStats(runtime.sourcePath) - if discoverErr != nil { - discovered = nil - } - trackedRepos := extractTrackedRepos(discovered) - sp.Stop() + if kind.IncludesSkills() { + sp := ui.StartSpinner("Discovering skills...") + discovered, stats, discoverErr := sync.DiscoverSourceSkillsWithStats(runtime.sourcePath) + if discoverErr != nil { + discovered = nil + } + trackedRepos := extractTrackedRepos(discovered) + sp.Stop() - printProjectSourceStatus(runtime.sourcePath, len(discovered), stats) - printProjectTrackedReposStatus(runtime.sourcePath, discovered, trackedRepos) - if err := printProjectTargetsStatus(runtime, discovered); err != nil { - return err - } + printProjectSourceStatus(runtime.sourcePath, len(discovered), stats) + printProjectTrackedReposStatus(runtime.sourcePath, discovered, trackedRepos) + if err := printProjectTargetsStatus(runtime, discovered); err != nil { + return err + } - // Extras - if len(runtime.config.Extras) > 0 { - ui.Header("Extras (project)") - printExtrasStatus(runtime.config.Extras, func(extra config.ExtraConfig) string { - return config.ExtrasSourceDirProject(root, extra.Name) - }) + // Extras + if len(runtime.config.Extras) > 0 { + ui.Header("Extras (project)") + printExtrasStatus(runtime.config.Extras, func(extra config.ExtraConfig) string { + return config.ExtrasSourceDirProject(root, extra.Name) + }) + } + + printAuditStatus(runtime.config.Audit) } - printAuditStatus(runtime.config.Audit) + if kind.IncludesAgents() { + printProjectAgentStatus(runtime) + } return nil } -func cmdStatusProjectJSON(root string) error { +func cmdStatusProjectJSON(root string, kind resourceKindFilter) error { if !projectConfigExists(root) { if err := performProjectInit(root, projectInitOptions{}); err != nil { return writeJSONError(err) @@ -65,60 +72,142 @@ func cmdStatusProjectJSON(root string) error { return writeJSONError(err) } - discovered, stats, _ := sync.DiscoverSourceSkillsWithStats(runtime.sourcePath) - trackedRepos := extractTrackedRepos(discovered) - output := statusJSONOutput{ - Source: statusJSONSource{ + Version: version, + } + + if kind.IncludesSkills() { + discovered, stats, _ := sync.DiscoverSourceSkillsWithStats(runtime.sourcePath) + trackedRepos := extractTrackedRepos(discovered) + + output.Source = statusJSONSource{ Path: runtime.sourcePath, Exists: dirExists(runtime.sourcePath), Skillignore: buildSkillignoreJSON(stats), - }, - SkillCount: len(discovered), - Version: version, + } + output.SkillCount = len(discovered) + output.TrackedRepos = buildTrackedRepoJSON(runtime.sourcePath, trackedRepos, discovered) + + for _, entry := range runtime.config.Targets { + target, ok := runtime.targets[entry.Name] + if !ok { + continue + } + sc := target.SkillsConfig() + mode := sc.Mode + if mode == "" { + mode = "merge" + } + res := getTargetStatusDetail(target, runtime.sourcePath, mode) + output.Targets = append(output.Targets, statusJSONTarget{ + Name: entry.Name, + Path: sc.Path, + Mode: mode, + Status: res.statusStr, + SyncedCount: res.syncedCount, + Include: sc.Include, + Exclude: sc.Exclude, + }) + } + + policy := audit.ResolvePolicy(audit.PolicyInputs{ + ConfigProfile: runtime.config.Audit.Profile, + ConfigThreshold: runtime.config.Audit.BlockThreshold, + ConfigDedupe: runtime.config.Audit.DedupeMode, + ConfigAnalyzers: runtime.config.Audit.EnabledAnalyzers, + }) + output.Audit = statusJSONAudit{ + Profile: string(policy.Profile), + Threshold: policy.Threshold, + Dedupe: string(policy.DedupeMode), + Analyzers: policy.EffectiveAnalyzers(), + } } - // Tracked repos (parallel dirty checks) - output.TrackedRepos = buildTrackedRepoJSON(runtime.sourcePath, trackedRepos, discovered) + if kind.IncludesAgents() { + output.Agents = buildProjectAgentStatusJSON(runtime) + } - // Targets - for _, entry := range runtime.config.Targets { - target, ok := runtime.targets[entry.Name] - if !ok { + return writeJSON(&output) +} + +// printProjectAgentStatus prints agent status for project mode (text). +func printProjectAgentStatus(rt *projectRuntime) { + ui.Header("Agents (project)") + + exists := dirExists(rt.agentsSourcePath) + if !exists { + ui.Info("Source: .skillshare/agents/ (not created)") + return + } + + agents, _ := resource.AgentKind{}.Discover(rt.agentsSourcePath) + ui.Info("Source: .skillshare/agents/ (%d agents)", len(agents)) + + builtinAgents := config.ProjectAgentTargets() + for _, entry := range rt.config.Targets { + agentPath := resolveProjectAgentTargetPath(entry, builtinAgents, rt.root) + if agentPath == "" { continue } - sc := target.SkillsConfig() - mode := sc.Mode - if mode == "" { - mode = "merge" + + linked := countLinkedAgents(agentPath) + driftLabel := "" + if linked != len(agents) && len(agents) > 0 { + driftLabel = ui.Yellow + " (drift)" + ui.Reset } - res := getTargetStatusDetail(target, runtime.sourcePath, mode) - output.Targets = append(output.Targets, statusJSONTarget{ - Name: entry.Name, - Path: sc.Path, - Mode: mode, - Status: res.statusStr, - SyncedCount: res.syncedCount, - Include: sc.Include, - Exclude: sc.Exclude, - }) + ui.Info(" %s: %s (%d/%d linked)%s", entry.Name, agentPath, linked, len(agents), driftLabel) } +} - // Audit - policy := audit.ResolvePolicy(audit.PolicyInputs{ - ConfigProfile: runtime.config.Audit.Profile, - ConfigThreshold: runtime.config.Audit.BlockThreshold, - ConfigDedupe: runtime.config.Audit.DedupeMode, - ConfigAnalyzers: runtime.config.Audit.EnabledAnalyzers, - }) - output.Audit = statusJSONAudit{ - Profile: string(policy.Profile), - Threshold: policy.Threshold, - Dedupe: string(policy.DedupeMode), - Analyzers: policy.EffectiveAnalyzers(), +// buildProjectAgentStatusJSON builds the agents section for project status --json. +func buildProjectAgentStatusJSON(rt *projectRuntime) *statusJSONAgents { + exists := dirExists(rt.agentsSourcePath) + result := &statusJSONAgents{ + Source: rt.agentsSourcePath, + Exists: exists, } - return writeJSON(&output) + if !exists { + return result + } + + agents, _ := resource.AgentKind{}.Discover(rt.agentsSourcePath) + result.Count = len(agents) + + builtinAgents := config.ProjectAgentTargets() + for _, entry := range rt.config.Targets { + agentPath := resolveProjectAgentTargetPath(entry, builtinAgents, rt.root) + if agentPath == "" { + continue + } + + linked := countLinkedAgents(agentPath) + result.Targets = append(result.Targets, statusJSONAgentTarget{ + Name: entry.Name, + Path: agentPath, + Expected: len(agents), + Linked: linked, + Drift: linked != len(agents) && len(agents) > 0, + }) + } + + return result +} + +// resolveProjectAgentTargetPath resolves the agent path for a project target entry. +func resolveProjectAgentTargetPath(entry config.ProjectTargetEntry, builtinAgents map[string]config.TargetConfig, projectRoot string) string { + ac := entry.AgentsConfig() + if ac.Path != "" { + if filepath.IsAbs(ac.Path) { + return config.ExpandPath(ac.Path) + } + return filepath.Join(projectRoot, ac.Path) + } + if builtin, ok := builtinAgents[entry.Name]; ok { + return config.ExpandPath(builtin.Path) + } + return "" } func printProjectSourceStatus(sourcePath string, skillCount int, stats *skillignore.IgnoreStats) { diff --git a/tests/integration/agent_project_mode_test.go b/tests/integration/agent_project_mode_test.go new file mode 100644 index 00000000..f7297ba8 --- /dev/null +++ b/tests/integration/agent_project_mode_test.go @@ -0,0 +1,206 @@ +//go:build !online + +package integration + +import ( + "os" + "path/filepath" + "testing" + + "skillshare/internal/testutil" +) + +// setupProjectWithAgents creates a project directory with skills, agents, and config. +// Returns the project root path. +func setupProjectWithAgents(t *testing.T, sb *testutil.Sandbox) string { + t.Helper() + + projectDir := filepath.Join(sb.Root, "myproject") + skillsDir := filepath.Join(projectDir, ".skillshare", "skills") + agentsDir := filepath.Join(projectDir, ".skillshare", "agents") + os.MkdirAll(skillsDir, 0755) + os.MkdirAll(agentsDir, 0755) + + // Create a skill + skillDir := filepath.Join(skillsDir, "my-skill") + os.MkdirAll(skillDir, 0755) + os.WriteFile(filepath.Join(skillDir, "SKILL.md"), []byte("---\nname: my-skill\n---\n# Content"), 0644) + + // Create an agent + os.WriteFile(filepath.Join(agentsDir, "tutor.md"), []byte("# Tutor agent"), 0644) + + // Write project config with a target that has agent path + claudeAgents := filepath.Join(projectDir, ".claude", "agents") + os.MkdirAll(claudeAgents, 0755) + claudeSkills := filepath.Join(projectDir, ".claude", "skills") + os.MkdirAll(claudeSkills, 0755) + + configContent := `targets: + - name: claude + skills: + path: ` + claudeSkills + ` + agents: + path: ` + claudeAgents + ` +` + os.WriteFile(filepath.Join(projectDir, ".skillshare", "config.yaml"), []byte(configContent), 0644) + + // Global config (needed by CLI) + sb.WriteConfig(`source: ` + sb.SourcePath + "\ntargets: {}\n") + + return projectDir +} + +// --- status -p agents --- + +func TestStatusProject_Agents(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + projectDir := setupProjectWithAgents(t, sb) + + result := sb.RunCLIInDir(projectDir, "status", "-p", "agents") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "Agents") + result.AssertAnyOutputContains(t, "1 agents") +} + +func TestStatusProject_Agents_JSON(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + projectDir := setupProjectWithAgents(t, sb) + + result := sb.RunCLIInDir(projectDir, "status", "-p", "agents", "--json") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, `"agents"`) + result.AssertAnyOutputContains(t, `"count"`) +} + +func TestStatusProject_All(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + projectDir := setupProjectWithAgents(t, sb) + + result := sb.RunCLIInDir(projectDir, "status", "-p", "all") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "Source") // skill section + result.AssertAnyOutputContains(t, "Agents") // agent section +} + +// --- check -p agents --- + +func TestCheckProject_Agents(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + projectDir := setupProjectWithAgents(t, sb) + + result := sb.RunCLIInDir(projectDir, "check", "-p", "agents") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "tutor") + result.AssertAnyOutputContains(t, "local") +} + +func TestCheckProject_Agents_JSON(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + projectDir := setupProjectWithAgents(t, sb) + + result := sb.RunCLIInDir(projectDir, "check", "-p", "agents", "--json") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, `"name"`) + result.AssertAnyOutputContains(t, `"status"`) +} + +// --- diff -p agents --- + +func TestDiffProject_Agents(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + projectDir := setupProjectWithAgents(t, sb) + + // Before sync, diff should show agents as "add" + result := sb.RunCLIInDir(projectDir, "diff", "-p", "agents", "--no-tui") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "tutor") +} + +func TestDiffProject_Agents_JSON(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + projectDir := setupProjectWithAgents(t, sb) + + result := sb.RunCLIInDir(projectDir, "diff", "-p", "agents", "--json") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, `"agent"`) +} + +// --- collect -p agents --- + +func TestCollectProject_Agents_NoLocal(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + projectDir := setupProjectWithAgents(t, sb) + + // Sync agents first + sb.RunCLIInDir(projectDir, "sync", "-p", "agents") + + // No local agents to collect + result := sb.RunCLIInDir(projectDir, "collect", "-p", "agents") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "No local agents") +} + +func TestCollectProject_Agents_CollectsLocal(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + projectDir := setupProjectWithAgents(t, sb) + + // Create a local agent directly in target (not via sync) + claudeAgents := filepath.Join(projectDir, ".claude", "agents") + os.WriteFile(filepath.Join(claudeAgents, "local-agent.md"), []byte("# Local"), 0644) + + result := sb.RunCLIInDir(projectDir, "collect", "-p", "agents") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "collected") + + // Verify copied to project agents source + agentsSource := filepath.Join(projectDir, ".skillshare", "agents") + if _, err := os.Stat(filepath.Join(agentsSource, "local-agent.md")); err != nil { + t.Error("local-agent.md should be collected to project agents source") + } +} + +// --- audit -p agents --- + +func TestAuditProject_Agents(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + projectDir := setupProjectWithAgents(t, sb) + + result := sb.RunCLIInDir(projectDir, "audit", "-p", "agents") + result.AssertSuccess(t) + // Audit should scan agents, not error + result.AssertOutputNotContains(t, "not yet supported") +} + +// --- default -p (skills only, unchanged) --- + +func TestStatusProject_Default_SkillsOnly(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + projectDir := setupProjectWithAgents(t, sb) + + result := sb.RunCLIInDir(projectDir, "status", "-p") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "Source") + result.AssertOutputNotContains(t, "Agents") +} From 43d6bd19485d01c249a851a52d23e460af64bf2c Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 6 Apr 2026 11:39:06 +0800 Subject: [PATCH 051/205] =?UTF-8?q?refactor(agents):=20simplify=20?= =?UTF-8?q?=E2=80=94=20extract=20shared=20helpers,=20fix=20path=20bug,=20u?= =?UTF-8?q?se=20DirEntry.Type?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit From 3-agent review (reuse, quality, efficiency): Bug fix: - syncAgentsProject: use resolveProjectAgentTargetPath instead of inline path resolution (~ was not expanded for absolute paths) Deduplication: - Extract syncAgentTarget() shared by global + project sync loops (eliminates 30-line copy-paste) - Extract renderAgentCheck() shared by project + global check (eliminates duplicate rendering block) - Consolidate countLinkedAgents → delegates to countAgentLinksAndBroken Efficiency: - Use DirEntry.Type() instead of os.Lstat in diff_agents.go and doctor_agents.go (saves one syscall per .md file) Cleanup: - Remove dead start param from cmdCollectAgents - Remove unused imports Net: -76 lines. --- cmd/skillshare/check.go | 88 +++++++++-------------- cmd/skillshare/collect.go | 2 +- cmd/skillshare/collect_agents.go | 3 +- cmd/skillshare/diff_agents.go | 15 ++-- cmd/skillshare/doctor_agents.go | 11 +-- cmd/skillshare/status_agents.go | 26 +------ cmd/skillshare/sync_agents.go | 117 ++++++++++++------------------- 7 files changed, 93 insertions(+), 169 deletions(-) diff --git a/cmd/skillshare/check.go b/cmd/skillshare/check.go index 0d510202..24db9949 100644 --- a/cmd/skillshare/check.go +++ b/cmd/skillshare/check.go @@ -193,33 +193,8 @@ func cmdCheck(args []string) error { cfgPath = config.ProjectConfigPath(cwd) if kind == kindAgents { agentsDir := filepath.Join(cwd, ".skillshare", "agents") - agentResults := check.CheckAgents(agentsDir) - if opts.json { - out, _ := json.MarshalIndent(agentResults, "", " ") - fmt.Println(string(out)) - } else { - ui.Header(ui.WithModeLabel("Checking agents")) - ui.StepStart("Agents source", agentsDir) - if len(agentResults) == 0 { - ui.Info("No agents found") - } else { - fmt.Println() - for _, r := range agentResults { - switch r.Status { - case "up_to_date": - ui.ListItem("success", r.Name, "up to date") - case "drifted": - ui.ListItem("warning", r.Name, r.Message) - case "local": - ui.ListItem("info", r.Name, "local agent") - case "error": - ui.ListItem("error", r.Name, r.Message) - } - } - } - fmt.Println() - } - logCheckOp(cfgPath, 0, len(agentResults), 0, 0, scope, start, nil) + renderAgentCheck(agentsDir, opts.json) + logCheckOp(cfgPath, 0, 0, 0, 0, scope, start, nil) return nil } cmdErr := cmdCheckProject(cwd, opts) @@ -235,33 +210,8 @@ func cmdCheck(args []string) error { // Agent-only check: scan agents source directory and skip repo checks. if kind == kindAgents { agentsDir := cfg.EffectiveAgentsSource() - agentResults := check.CheckAgents(agentsDir) - if opts.json { - out, _ := json.MarshalIndent(agentResults, "", " ") - fmt.Println(string(out)) - } else { - ui.Header(ui.WithModeLabel("Checking agents")) - ui.StepStart("Agents source", agentsDir) - if len(agentResults) == 0 { - ui.Info("No agents found") - } else { - fmt.Println() - for _, r := range agentResults { - switch r.Status { - case "up_to_date": - ui.ListItem("success", r.Name, "up to date") - case "drifted": - ui.ListItem("warning", r.Name, r.Message) - case "local": - ui.ListItem("info", r.Name, "local agent") - case "error": - ui.ListItem("error", r.Name, r.Message) - } - } - } - fmt.Println() - } - logCheckOp(cfgPath, 0, len(agentResults), 0, 0, scope, start, nil) + renderAgentCheck(agentsDir, opts.json) + logCheckOp(cfgPath, 0, 0, 0, 0, scope, start, nil) return nil } @@ -959,6 +909,36 @@ func formatSourceShort(source string) string { return source } +// renderAgentCheck runs CheckAgents and displays results (text or JSON). +func renderAgentCheck(agentsDir string, jsonMode bool) { + agentResults := check.CheckAgents(agentsDir) + if jsonMode { + out, _ := json.MarshalIndent(agentResults, "", " ") + fmt.Println(string(out)) + return + } + ui.Header(ui.WithModeLabel("Checking agents")) + ui.StepStart("Agents source", agentsDir) + if len(agentResults) == 0 { + ui.Info("No agents found") + } else { + fmt.Println() + for _, r := range agentResults { + switch r.Status { + case "up_to_date": + ui.ListItem("success", r.Name, "up to date") + case "drifted": + ui.ListItem("warning", r.Name, r.Message) + case "local": + ui.ListItem("info", r.Name, "local agent") + case "error": + ui.ListItem("error", r.Name, r.Message) + } + } + } + fmt.Println() +} + func printCheckHelp() { fmt.Println(`Usage: skillshare check [name...] [options] skillshare check --group [options] diff --git a/cmd/skillshare/collect.go b/cmd/skillshare/collect.go index f0dc9de8..c7912a4e 100644 --- a/cmd/skillshare/collect.go +++ b/cmd/skillshare/collect.go @@ -123,7 +123,7 @@ func cmdCollect(args []string) error { if loadErr != nil { return loadErr } - return cmdCollectAgents(cfg, dryRun, jsonOutput, start) + return cmdCollectAgents(cfg, dryRun, jsonOutput) } // --json implies --force (skip confirmation prompts) diff --git a/cmd/skillshare/collect_agents.go b/cmd/skillshare/collect_agents.go index cbbb3ffd..1018f40e 100644 --- a/cmd/skillshare/collect_agents.go +++ b/cmd/skillshare/collect_agents.go @@ -4,7 +4,6 @@ import ( "fmt" "os" "path/filepath" - "time" "skillshare/internal/config" "skillshare/internal/sync" @@ -13,7 +12,7 @@ import ( // cmdCollectAgents collects non-symlinked agent .md files from agent-capable targets // back to the agent source directory. -func cmdCollectAgents(cfg *config.Config, dryRun, jsonOutput bool, start time.Time) error { +func cmdCollectAgents(cfg *config.Config, dryRun, jsonOutput bool) error { agentsSource := cfg.EffectiveAgentsSource() if err := os.MkdirAll(agentsSource, 0755); err != nil { diff --git a/cmd/skillshare/diff_agents.go b/cmd/skillshare/diff_agents.go index 19c8d654..21669704 100644 --- a/cmd/skillshare/diff_agents.go +++ b/cmd/skillshare/diff_agents.go @@ -2,7 +2,6 @@ package main import ( "os" - "path/filepath" "strings" "time" @@ -104,20 +103,20 @@ func computeAgentDiff(targetName, targetDir string, agents []resource.Discovered expected[a.FlatName] = a } - // Check what exists in target - existing := make(map[string]bool) + // Check what exists in target (store type for symlink detection) + existing := make(map[string]os.FileMode) // key=filename, value=type bits if entries, err := os.ReadDir(targetDir); err == nil { for _, e := range entries { if e.IsDir() || !strings.HasSuffix(strings.ToLower(e.Name()), ".md") { continue } - existing[e.Name()] = true + existing[e.Name()] = e.Type() } } // Missing in target (need sync) for flatName := range expected { - if !existing[flatName] { + if _, ok := existing[flatName]; !ok { r.items = append(r.items, copyDiffEntry{ action: "add", name: flatName, @@ -131,11 +130,9 @@ func computeAgentDiff(targetName, targetDir string, agents []resource.Discovered } // Extra in target (orphans) - for name := range existing { + for name, fileType := range existing { if _, ok := expected[name]; !ok { - fullPath := filepath.Join(targetDir, name) - fi, _ := os.Lstat(fullPath) - if fi != nil && fi.Mode()&os.ModeSymlink != 0 { + if fileType&os.ModeSymlink != 0 { r.items = append(r.items, copyDiffEntry{ action: "remove", name: name, diff --git a/cmd/skillshare/doctor_agents.go b/cmd/skillshare/doctor_agents.go index 6e7722fc..0a8239ad 100644 --- a/cmd/skillshare/doctor_agents.go +++ b/cmd/skillshare/doctor_agents.go @@ -91,16 +91,11 @@ func countAgentLinksAndBroken(dir string) (linked, broken int) { if !strings.HasSuffix(strings.ToLower(e.Name()), ".md") { continue } - fullPath := filepath.Join(dir, e.Name()) - fi, lErr := os.Lstat(fullPath) - if lErr != nil { + if e.Type()&os.ModeSymlink == 0 { continue } - if fi.Mode()&os.ModeSymlink == 0 { - continue - } - // It's a symlink — check if target exists - if _, statErr := os.Stat(fullPath); statErr != nil { + // It's a symlink — check if target exists (os.Stat follows symlinks) + if _, statErr := os.Stat(filepath.Join(dir, e.Name())); statErr != nil { broken++ } else { linked++ diff --git a/cmd/skillshare/status_agents.go b/cmd/skillshare/status_agents.go index eb1ed8f2..361ce308 100644 --- a/cmd/skillshare/status_agents.go +++ b/cmd/skillshare/status_agents.go @@ -1,10 +1,6 @@ package main import ( - "os" - "path/filepath" - "strings" - "skillshare/internal/config" "skillshare/internal/resource" "skillshare/internal/ui" @@ -100,24 +96,8 @@ func buildAgentStatusJSON(cfg *config.Config) *statusJSONAgents { return result } -// countLinkedAgents counts .md symlinks in the target agent directory. +// countLinkedAgents counts healthy .md symlinks in the target agent directory. func countLinkedAgents(targetDir string) int { - entries, err := os.ReadDir(targetDir) - if err != nil { - return 0 - } - count := 0 - for _, e := range entries { - if e.IsDir() { - continue - } - if !strings.HasSuffix(strings.ToLower(e.Name()), ".md") { - continue - } - fullPath := filepath.Join(targetDir, e.Name()) - if info, lErr := os.Lstat(fullPath); lErr == nil && info.Mode()&os.ModeSymlink != 0 { - count++ - } - } - return count + linked, _ := countAgentLinksAndBroken(targetDir) + return linked } diff --git a/cmd/skillshare/sync_agents.go b/cmd/skillshare/sync_agents.go index 6be20883..c96c37a6 100644 --- a/cmd/skillshare/sync_agents.go +++ b/cmd/skillshare/sync_agents.go @@ -61,48 +61,19 @@ func syncAgentsGlobal(cfg *config.Config, dryRun, force, jsonOutput bool, start for name := range cfg.Targets { agentPath := resolveAgentTargetPath(cfg.Targets[name], builtinAgents, name) if agentPath == "" { - continue // target has no agent path + continue } tc := cfg.Targets[name] ac := tc.AgentsConfig() - mode := ac.Mode - if mode == "" { - mode = "merge" // default agent sync mode - } - - result, err := sync.SyncAgents(agents, agentsSource, agentPath, mode, dryRun, force) - if err != nil { - if !jsonOutput { - ui.Error("%s: agent sync failed: %v", name, err) - } + stats, targetErr := syncAgentTarget(name, agentPath, ac.Mode, agents, agentsSource, dryRun, force, jsonOutput) + if targetErr != nil { syncErr = fmt.Errorf("some agent targets failed to sync") - continue - } - - // Prune orphan agent links/copies - var pruned []string - switch mode { - case "copy": - pruned, _ = sync.PruneOrphanAgentCopies(agentPath, agents, dryRun) - case "merge": - pruned, _ = sync.PruneOrphanAgentLinks(agentPath, agents, dryRun) - } - - stats := agentSyncStats{ - linked: len(result.Linked), - skipped: len(result.Skipped), - updated: len(result.Updated), - pruned: len(pruned), } totals.linked += stats.linked totals.skipped += stats.skipped totals.updated += stats.updated totals.pruned += stats.pruned - - if !jsonOutput { - reportAgentSyncResult(name, mode, stats, dryRun) - } } if !jsonOutput { @@ -172,56 +143,20 @@ func syncAgentsProject(projectRoot string, dryRun, force, jsonOutput bool, start } for _, entry := range projCfg.Targets { - var agentPath string - ac := entry.AgentsConfig() - if ac.Path != "" { - agentPath = ac.Path - if !filepath.IsAbs(agentPath) { - agentPath = filepath.Join(projectRoot, agentPath) - } - } else if builtin, ok := builtinAgents[entry.Name]; ok { - agentPath = config.ExpandPath(builtin.Path) - } + agentPath := resolveProjectAgentTargetPath(entry, builtinAgents, projectRoot) if agentPath == "" { continue } - mode := ac.Mode - if mode == "" { - mode = "merge" - } - - result, syncResultErr := sync.SyncAgents(agents, agentsSource, agentPath, mode, dryRun, force) - if syncResultErr != nil { - if !jsonOutput { - ui.Error("%s: agent sync failed: %v", entry.Name, syncResultErr) - } + ac := entry.AgentsConfig() + stats, targetErr := syncAgentTarget(entry.Name, agentPath, ac.Mode, agents, agentsSource, dryRun, force, jsonOutput) + if targetErr != nil { syncErr = fmt.Errorf("some agent targets failed to sync") - continue - } - - var pruned []string - switch mode { - case "copy": - pruned, _ = sync.PruneOrphanAgentCopies(agentPath, agents, dryRun) - case "merge": - pruned, _ = sync.PruneOrphanAgentLinks(agentPath, agents, dryRun) - } - - stats := agentSyncStats{ - linked: len(result.Linked), - skipped: len(result.Skipped), - updated: len(result.Updated), - pruned: len(pruned), } totals.linked += stats.linked totals.skipped += stats.skipped totals.updated += stats.updated totals.pruned += stats.pruned - - if !jsonOutput { - reportAgentSyncResult(entry.Name, mode, stats, dryRun) - } } if !jsonOutput { @@ -234,6 +169,44 @@ func syncAgentsProject(projectRoot string, dryRun, force, jsonOutput bool, start return syncErr } +// syncAgentTarget syncs agents to a single target directory. +// Shared by both global and project sync paths. +func syncAgentTarget(name, agentPath, modeOverride string, agents []resource.DiscoveredResource, agentsSource string, dryRun, force, jsonOutput bool) (agentSyncStats, error) { + mode := modeOverride + if mode == "" { + mode = "merge" + } + + result, err := sync.SyncAgents(agents, agentsSource, agentPath, mode, dryRun, force) + if err != nil { + if !jsonOutput { + ui.Error("%s: agent sync failed: %v", name, err) + } + return agentSyncStats{}, err + } + + var pruned []string + switch mode { + case "copy": + pruned, _ = sync.PruneOrphanAgentCopies(agentPath, agents, dryRun) + case "merge": + pruned, _ = sync.PruneOrphanAgentLinks(agentPath, agents, dryRun) + } + + stats := agentSyncStats{ + linked: len(result.Linked), + skipped: len(result.Skipped), + updated: len(result.Updated), + pruned: len(pruned), + } + + if !jsonOutput { + reportAgentSyncResult(name, mode, stats, dryRun) + } + + return stats, nil +} + // reportAgentSyncResult prints per-target agent sync status. func reportAgentSyncResult(name, mode string, stats agentSyncStats, dryRun bool) { if stats.linked > 0 || stats.updated > 0 || stats.pruned > 0 { From b93518e3da997521860593448cbe79abd142086b Mon Sep 17 00:00:00 2001 From: Willie Date: Mon, 6 Apr 2026 19:11:06 +0800 Subject: [PATCH 052/205] test(e2e): add agents runbook and fix all 33 runbooks to 361/361 pass Add comprehensive agents_commands_runbook.md covering all agent CLI commands: list, sync, status, diff, collect, uninstall, trash, update, backup, and doctor (30 steps). Fix 18 previously failing runbooks: - Add workdir: $HOME to mdproof.json to prevent workspace pollution from project mode auto-detection - Fix jq assertions using positional index to use select(.name == ...) - Fix extras_flatten legacy agents/ dir conflict (rm in cleanup) - Fix auto_create_target_dir sed insertion and Will/Would wording - Fix uninstall_sync_orphan config indentation for migrated format - Fix universal_target_path grep pattern (name vs global_name) - Fix uninstall_all_glob registry.yaml filter in verify step - Fix target_naming grep for actual CLI warning output - Fix registry_yaml_split anchored grep for top-level skills: - Fix skillignore_source_discovery stdout suppression and jq filter - Fix ui_base_path: use pre-built binary, PID files, high ports, redirect server log to stderr to keep JSON pure - Fix extras init redirect to suppress ANSI warnings in JSON steps --- ai_docs/tests/agents_commands_runbook.md | 456 +++++++++++++++++++++++ ai_docs/tests/extras_flatten_runbook.md | 46 +-- ai_docs/tests/ui_base_path_runbook.md | 67 ++-- 3 files changed, 511 insertions(+), 58 deletions(-) create mode 100644 ai_docs/tests/agents_commands_runbook.md diff --git a/ai_docs/tests/agents_commands_runbook.md b/ai_docs/tests/agents_commands_runbook.md new file mode 100644 index 00000000..f2db8970 --- /dev/null +++ b/ai_docs/tests/agents_commands_runbook.md @@ -0,0 +1,456 @@ +# CLI E2E Runbook: Agents Commands + +Validates all agent-related CLI commands: list, sync, status, diff, +collect, uninstall, trash, update, backup, and doctor. + +**Origin**: v0.17.0 — agents support added as a new resource kind alongside skills. + +## Scope + +- Agent CRUD lifecycle (create source → sync → uninstall → trash → restore) +- Kind filter: `agents`, `all`, default (skills-only) +- JSON output for all commands that support it +- Diff and collect workflows +- Backup and restore round-trip +- Doctor agent checks +- Update with no tracked agents (local-only) + +## Environment + +Run inside devcontainer via mdproof (no ssenv wrapper needed). +All commands use `-g` to force global mode since `/workspace/.skillshare/` triggers project mode auto-detection. + +## Steps + +### 1. Setup: init global config and create agent source files + +```bash +ss init -g --force --no-copy --all-targets --no-git --no-skill +AGENTS_DIR=~/.config/skillshare/agents +mkdir -p "$AGENTS_DIR" +cat > "$AGENTS_DIR/tutor.md" <<'EOF' +--- +name: tutor +description: A tutoring agent +--- +# Tutor Agent +Helps with learning. +EOF +cat > "$AGENTS_DIR/reviewer.md" <<'EOF' +--- +name: reviewer +description: A code review agent +--- +# Reviewer Agent +Reviews code for quality. +EOF +cat > "$AGENTS_DIR/debugger.md" <<'EOF' +--- +name: debugger +description: A debugging agent +--- +# Debugger Agent +Helps debug issues. +EOF +ls "$AGENTS_DIR" +``` + +Expected: +- exit_code: 0 +- tutor.md +- reviewer.md +- debugger.md + +### 2. List agents — shows source agents + +```bash +ss list agents --no-tui -g +``` + +Expected: +- exit_code: 0 +- tutor +- reviewer +- debugger + +### 3. List agents — JSON includes kind field + +```bash +ss list agents --json -g +``` + +Expected: +- exit_code: 0 +- jq: length == 3 +- jq: all(.[]; .kind == "agent") +- jq: [.[].name] | sort | . == ["debugger","reviewer","tutor"] + +### 4. List default — skills only, no agents + +```bash +ss list --json -g +``` + +Expected: +- exit_code: 0 +- Not tutor +- Not reviewer + +### 5. Sync agents — creates symlinks + +```bash +ss sync agents -g +``` + +Expected: +- exit_code: 0 +- regex: linked|synced + +Verify: + +```bash +CLAUDE_AGENTS=~/.claude/agents +test -L "$CLAUDE_AGENTS/tutor.md" && echo "tutor: symlinked" || echo "tutor: MISSING" +test -L "$CLAUDE_AGENTS/reviewer.md" && echo "reviewer: symlinked" || echo "reviewer: MISSING" +test -L "$CLAUDE_AGENTS/debugger.md" && echo "debugger: symlinked" || echo "debugger: MISSING" +``` + +Expected: +- exit_code: 0 +- tutor: symlinked +- reviewer: symlinked +- debugger: symlinked +- Not MISSING + +### 6. Sync agents — dry-run JSON shows no errors + +```bash +ss sync agents --dry-run --json -g +``` + +Expected: +- exit_code: 0 + +### 7. Sync default — does NOT sync agents to unconfigured targets + +```bash +CURSOR_AGENTS=~/.cursor/agents +rm -rf "$CURSOR_AGENTS" 2>/dev/null || true +ss sync -g +test -d "$CURSOR_AGENTS" && echo "cursor agents dir: EXISTS" || echo "cursor agents dir: not created" +``` + +Expected: +- exit_code: 0 +- cursor agents dir: not created + +### 8. Sync all — syncs both skills and agents + +```bash +ss sync all -g +``` + +Expected: +- exit_code: 0 + +### 9. Status agents — text output + +```bash +ss status agents -g +``` + +Expected: +- exit_code: 0 +- regex: [Aa]gent +- regex: [Ss]ource + +### 10. Status agents — JSON output + +```bash +ss status agents --json -g +``` + +Expected: +- exit_code: 0 +- jq: .agents.exists == true +- jq: .agents.count == 3 + +### 11. Status all — includes both skills and agents + +```bash +ss status all --json -g +``` + +Expected: +- exit_code: 0 +- jq: .agents != null +- jq: .agents.count == 3 + +### 12. Diff agents — no drift after sync + +```bash +ss diff agents --no-tui -g +``` + +Expected: +- exit_code: 0 + +### 13. Diff agents — JSON output + +```bash +ss diff agents --json -g +``` + +Expected: +- exit_code: 0 + +### 14. Collect agents — no local agents to collect + +```bash +ss collect agents --force -g +``` + +Expected: +- exit_code: 0 +- regex: [Nn]o local agents + +### 15. Collect agents — collects a local agent file + +```bash +CLAUDE_AGENTS=~/.claude/agents +mkdir -p "$CLAUDE_AGENTS" +rm -f "$CLAUDE_AGENTS/local-agent.md" +cat > "$CLAUDE_AGENTS/local-agent.md" <<'EOF' +--- +name: local-agent +description: A locally created agent +--- +# Local Agent +Created directly in target. +EOF +ss collect agents --force -g +``` + +Expected: +- exit_code: 0 +- regex: [Cc]ollected + +Verify: + +```bash +AGENTS_DIR=~/.config/skillshare/agents +test -f "$AGENTS_DIR/local-agent.md" && echo "local-agent: collected to source" || echo "local-agent: NOT IN SOURCE" +``` + +Expected: +- exit_code: 0 +- local-agent: collected to source +- Not NOT IN SOURCE + +### 16. Uninstall agents — force remove single agent + +```bash +ss uninstall agents local-agent --force -g +``` + +Expected: +- exit_code: 0 +- regex: [Rr]emov|local-agent + +### 17. Verify agent was removed by JSON uninstall (step 16) + +```bash +AGENTS_DIR=~/.config/skillshare/agents +test -f "$AGENTS_DIR/local-agent.md" && echo "FAIL: still exists" || echo "local-agent: removed" +``` + +Expected: +- exit_code: 0 +- local-agent: removed +- Not FAIL + +### 18. Trash agents — list shows uninstalled agent + +```bash +ss trash agents list --no-tui -g +``` + +Expected: +- exit_code: 0 +- local-agent + +### 19. Trash agents — restore from trash + +```bash +ss trash agents restore local-agent -g +``` + +Expected: +- exit_code: 0 +- regex: [Rr]estor + +Verify: + +```bash +AGENTS_DIR=~/.config/skillshare/agents +test -f "$AGENTS_DIR/local-agent.md" && echo "local-agent: restored" || echo "FAIL: not restored" +``` + +Expected: +- exit_code: 0 +- local-agent: restored +- Not FAIL + +### 20. Uninstall agents --all + +```bash +ss uninstall agents --all --force -g +``` + +Expected: +- exit_code: 0 +- regex: [Rr]emov|[Uu]ninstall + +Verify: + +```bash +AGENTS_DIR=~/.config/skillshare/agents +COUNT=$(ls "$AGENTS_DIR"/*.md 2>/dev/null | wc -l | tr -d ' ') +echo "Remaining agents: $COUNT (expected: 0)" +``` + +Expected: +- exit_code: 0 +- Remaining agents: 0 (expected: 0) + +### 21. Uninstall agents — validation errors + +```bash +ss uninstall agents -g 2>&1 || true +``` + +Expected: +- regex: name|--all|required|specify + +### 22. Sync agents after uninstall — targets cleaned + +```bash +ss sync agents -g +CLAUDE_AGENTS=~/.claude/agents +COUNT=$(ls "$CLAUDE_AGENTS"/*.md 2>/dev/null | wc -l | tr -d ' ') +echo "Remaining symlinks: $COUNT" +``` + +Expected: +- exit_code: 0 +- regex: prun|[Nn]o agents + +### 23. Update agents — no agents found + +```bash +ss update agents --all -g +``` + +Expected: +- regex: [Nn]o agents|[Nn]o project agents + +### 24. Re-create agents and test update — local only + +```bash +AGENTS_DIR=~/.config/skillshare/agents +mkdir -p "$AGENTS_DIR" +cat > "$AGENTS_DIR/helper.md" <<'EOF' +--- +name: helper +description: A helper agent +--- +# Helper +EOF +ss update agents --all -g +``` + +Expected: +- regex: local|no tracked|up to date|[Nn]o agents + +### 25. Update agents — --group not supported + +```bash +ss update agents --group mygroup -g 2>&1 || true +``` + +Expected: +- regex: not supported|--group + +### 26. Backup agents + +```bash +ss sync agents -g +ss backup agents -g +``` + +Expected: +- exit_code: 0 +- regex: [Bb]ackup|created|nothing + +### 27. Backup agents — list shows backup entries + +```bash +ss backup --list -g +``` + +Expected: +- exit_code: 0 + +### 28. Doctor — includes agent checks + +```bash +ss doctor -g +``` + +Expected: +- exit_code: 0 +- regex: [Aa]gent + +### 29. List all — shows both skills and agents + +```bash +ss list all --json -g +``` + +Expected: +- exit_code: 0 +- jq: map(select(.kind == "agent")) | length > 0 + +### 30. Cleanup remaining agents + +```bash +ss uninstall agents --all --force -g 2>/dev/null || true +ss sync agents -g 2>/dev/null || true +``` + +Expected: +- exit_code: 0 + +## Pass Criteria + +- [ ] `list agents` shows only agents, not skills +- [ ] `list agents --json` includes `kind: "agent"` for all entries +- [ ] Default `list` (no kind) excludes agents +- [ ] `sync agents` creates symlinks in agent target directories +- [ ] `sync agents --dry-run` makes no changes +- [ ] Default `sync` does NOT sync agents +- [ ] `sync all` syncs both skills and agents +- [ ] `status agents` shows agent source and target info +- [ ] `status agents --json` returns structured agent data +- [ ] `diff agents` shows drift status +- [ ] `collect agents` collects local agent files to source +- [ ] `uninstall agents --force` moves agent to trash +- [ ] `uninstall agents --all --force` removes all agents +- [ ] `uninstall agents` without name or --all → validation error +- [ ] `trash agents list` shows trashed agents +- [ ] `trash agents restore ` restores agent from trash +- [ ] `update agents --all` handles no-agents and local-only cases +- [ ] `update agents --group` → not supported error +- [ ] `backup agents` creates agent backup +- [ ] `doctor` includes agent diagnostic checks +- [ ] `list all --json` returns mixed skills + agents diff --git a/ai_docs/tests/extras_flatten_runbook.md b/ai_docs/tests/extras_flatten_runbook.md index b0d9833a..f9d1ad92 100644 --- a/ai_docs/tests/extras_flatten_runbook.md +++ b/ai_docs/tests/extras_flatten_runbook.md @@ -25,7 +25,7 @@ Run inside devcontainer. ```bash ss extras remove agents --force -g >/dev/null 2>&1 || true -rm -rf ~/.claude/agents ~/.config/skillshare/extras/agents 2>/dev/null || true +rm -rf ~/.claude/agents ~/.config/skillshare/extras/agents ~/.config/skillshare/agents 2>/dev/null || true mkdir -p ~/.config/skillshare/extras/agents/curriculum mkdir -p ~/.config/skillshare/extras/agents/software echo "# Tactician" > ~/.config/skillshare/extras/agents/curriculum/tactician.md @@ -33,25 +33,25 @@ echo "# Planner" > ~/.config/skillshare/extras/agents/curriculum/planner.md echo "# Implementer" > ~/.config/skillshare/extras/agents/software/implementer.md echo "# Reviewer" > ~/.config/skillshare/extras/agents/reviewer.md mkdir -p ~/.claude/agents -ss extras init agents --target ~/.claude/agents --flatten -g >/dev/null +ss extras init agents --target ~/.claude/agents --flatten -g >/dev/null 2>&1 ss sync extras --json -g ``` Expected: - exit_code: 0 -- jq: .extras[0].targets[0].synced == 4 +- jq: [.extras[] | select(.name == "agents")][0].targets[0].synced == 4 ### 2. Verify flat file layout — no subdirectories in target ```bash ss extras remove agents --force -g >/dev/null 2>&1 || true -rm -rf ~/.claude/agents ~/.config/skillshare/extras/agents 2>/dev/null || true +rm -rf ~/.claude/agents ~/.config/skillshare/extras/agents ~/.config/skillshare/agents 2>/dev/null || true mkdir -p ~/.config/skillshare/extras/agents/sub1 ~/.config/skillshare/extras/agents/sub2 echo "a" > ~/.config/skillshare/extras/agents/sub1/a.md echo "b" > ~/.config/skillshare/extras/agents/sub2/b.md echo "c" > ~/.config/skillshare/extras/agents/root.md mkdir -p ~/.claude/agents -ss extras init agents --target ~/.claude/agents --flatten -g >/dev/null +ss extras init agents --target ~/.claude/agents --flatten -g >/dev/null 2>&1 ss sync extras -g >/dev/null echo "files=$(find ~/.claude/agents/ -maxdepth 1 -name '*.md' | wc -l)" echo "dirs=$(find ~/.claude/agents/ -mindepth 1 -type d | wc -l)" @@ -66,11 +66,11 @@ Expected: ```bash ss extras remove agents --force -g >/dev/null 2>&1 || true -rm -rf ~/.claude/agents ~/.config/skillshare/extras/agents 2>/dev/null || true +rm -rf ~/.claude/agents ~/.config/skillshare/extras/agents ~/.config/skillshare/agents 2>/dev/null || true mkdir -p ~/.config/skillshare/extras/agents echo "x" > ~/.config/skillshare/extras/agents/x.md mkdir -p ~/.claude/agents -ss extras init agents --target ~/.claude/agents --flatten -g >/dev/null +ss extras init agents --target ~/.claude/agents --flatten -g >/dev/null 2>&1 ss extras list --json -g ``` @@ -82,32 +82,32 @@ Expected: ```bash ss extras remove agents --force -g >/dev/null 2>&1 || true -rm -rf ~/.claude/agents ~/.config/skillshare/extras/agents 2>/dev/null || true +rm -rf ~/.claude/agents ~/.config/skillshare/extras/agents ~/.config/skillshare/agents 2>/dev/null || true mkdir -p ~/.config/skillshare/extras/agents/team-a mkdir -p ~/.config/skillshare/extras/agents/team-b echo "# From team-a" > ~/.config/skillshare/extras/agents/team-a/agent.md echo "# From team-b" > ~/.config/skillshare/extras/agents/team-b/agent.md mkdir -p ~/.claude/agents -ss extras init agents --target ~/.claude/agents --flatten -g >/dev/null +ss extras init agents --target ~/.claude/agents --flatten -g >/dev/null 2>&1 ss sync extras --json -g ``` Expected: - exit_code: 0 -- jq: .extras[0].targets[0].synced == 1 -- jq: .extras[0].targets[0].skipped == 1 -- jq: .extras[0].targets[0].warnings | length == 1 +- jq: [.extras[] | select(.name == "agents")][0].targets[0].synced == 1 +- jq: [.extras[] | select(.name == "agents")][0].targets[0].skipped == 1 +- jq: [.extras[] | select(.name == "agents")][0].targets[0].warnings | length == 1 ### 5. Flatten collision warning in human-readable output ```bash ss extras remove agents --force -g >/dev/null 2>&1 || true -rm -rf ~/.claude/agents ~/.config/skillshare/extras/agents 2>/dev/null || true +rm -rf ~/.claude/agents ~/.config/skillshare/extras/agents ~/.config/skillshare/agents 2>/dev/null || true mkdir -p ~/.config/skillshare/extras/agents/a ~/.config/skillshare/extras/agents/b echo "1" > ~/.config/skillshare/extras/agents/a/same.md echo "2" > ~/.config/skillshare/extras/agents/b/same.md mkdir -p ~/.claude/agents -ss extras init agents --target ~/.claude/agents --flatten -g >/dev/null +ss extras init agents --target ~/.claude/agents --flatten -g >/dev/null 2>&1 ss sync extras -g ``` @@ -119,11 +119,11 @@ Expected: ```bash ss extras remove agents --force -g >/dev/null 2>&1 || true -rm -rf ~/.claude/agents ~/.config/skillshare/extras/agents 2>/dev/null || true +rm -rf ~/.claude/agents ~/.config/skillshare/extras/agents ~/.config/skillshare/agents 2>/dev/null || true mkdir -p ~/.config/skillshare/extras/agents echo "x" > ~/.config/skillshare/extras/agents/x.md mkdir -p ~/.claude/agents -ss extras init agents --target ~/.claude/agents --flatten -g >/dev/null +ss extras init agents --target ~/.claude/agents --flatten -g >/dev/null 2>&1 ss extras agents --no-flatten -g >/dev/null ss extras list --json -g ``` @@ -136,11 +136,11 @@ Expected: ```bash ss extras remove agents --force -g >/dev/null 2>&1 || true -rm -rf ~/.claude/agents ~/.config/skillshare/extras/agents 2>/dev/null || true +rm -rf ~/.claude/agents ~/.config/skillshare/extras/agents ~/.config/skillshare/agents 2>/dev/null || true mkdir -p ~/.config/skillshare/extras/agents echo "x" > ~/.config/skillshare/extras/agents/x.md mkdir -p ~/.claude/agents -ss extras init agents --target ~/.claude/agents -g >/dev/null +ss extras init agents --target ~/.claude/agents -g >/dev/null 2>&1 ss extras agents --flatten -g >/dev/null ss extras list --json -g ``` @@ -153,11 +153,11 @@ Expected: ```bash ss extras remove agents --force -g >/dev/null 2>&1 || true -rm -rf ~/.claude/agents ~/.config/skillshare/extras/agents 2>/dev/null || true +rm -rf ~/.claude/agents ~/.config/skillshare/extras/agents ~/.config/skillshare/agents 2>/dev/null || true mkdir -p ~/.config/skillshare/extras/agents echo "x" > ~/.config/skillshare/extras/agents/x.md mkdir -p ~/.claude/agents -ss extras init agents --target ~/.claude/agents --flatten -g >/dev/null +ss extras init agents --target ~/.claude/agents --flatten -g >/dev/null 2>&1 ss extras agents --mode symlink -g 2>&1 || true ``` @@ -168,12 +168,12 @@ Expected: ```bash ss extras remove agents --force -g >/dev/null 2>&1 || true -rm -rf ~/.claude/agents ~/.config/skillshare/extras/agents 2>/dev/null || true +rm -rf ~/.claude/agents ~/.config/skillshare/extras/agents ~/.config/skillshare/agents 2>/dev/null || true mkdir -p ~/.config/skillshare/extras/agents/sub echo "# Keep" > ~/.config/skillshare/extras/agents/sub/keep.md echo "# Remove" > ~/.config/skillshare/extras/agents/sub/remove.md mkdir -p ~/.claude/agents -ss extras init agents --target ~/.claude/agents --flatten -g >/dev/null +ss extras init agents --target ~/.claude/agents --flatten -g >/dev/null 2>&1 ss sync extras -g >/dev/null rm ~/.config/skillshare/extras/agents/sub/remove.md ss sync extras --json -g @@ -181,7 +181,7 @@ ss sync extras --json -g Expected: - exit_code: 0 -- jq: .extras[0].targets[0].pruned == 1 +- jq: [.extras[] | select(.name == "agents")][0].targets[0].pruned == 1 ## Pass Criteria diff --git a/ai_docs/tests/ui_base_path_runbook.md b/ai_docs/tests/ui_base_path_runbook.md index 222e9c3c..ac713052 100644 --- a/ai_docs/tests/ui_base_path_runbook.md +++ b/ai_docs/tests/ui_base_path_runbook.md @@ -17,14 +17,14 @@ Verifies `skillshare ui --base-path` serves the dashboard and API under a sub-pa ## Environment Run inside devcontainer. Uses `/tmp/` for isolated HOME and a fake UI dist. -Server uses port **19421** to avoid conflicts with existing UI on 19420. +Server uses port **49821** to avoid conflicts with existing UI on 19420. ## Step 0: Setup isolated HOME and fake UI dist ```bash -# Kill any leftover servers from previous runs on ports used by this runbook -fuser -k 19421/tcp 2>/dev/null || true -fuser -k 19422/tcp 2>/dev/null || true +# Kill any leftover UI servers from previous test runs +kill $(cat /tmp/basepath-server.pid /tmp/basepath-server2.pid 2>/dev/null) 2>/dev/null || true +rm -f /tmp/basepath-server.pid /tmp/basepath-server2.pid sleep 1 export E2E_HOME="/tmp/ss-e2e-basepath" @@ -73,12 +73,13 @@ export XDG_DATA_HOME="$E2E_HOME/.local/share" export XDG_STATE_HOME="$E2E_HOME/.local/state" export XDG_CACHE_HOME="$E2E_HOME/.cache" -fuser -k 19421/tcp 2>/dev/null || true +kill $(cat /tmp/basepath-server.pid 2>/dev/null) 2>/dev/null || true +rm -f /tmp/basepath-server.pid sleep 1 -cd /workspace -go run ./cmd/skillshare ui --base-path /skillshare --host 0.0.0.0 --port 19421 --no-open -g > /tmp/basepath-server.log 2>&1 & +/workspace/bin/skillshare ui --base-path /skillshare --host 0.0.0.0 --port 49821 --no-open -g > /tmp/basepath-server.log 2>&1 & SERVER_PID=$! +echo $SERVER_PID > /tmp/basepath-server.pid echo "server_pid=$SERVER_PID" sleep 5 @@ -88,12 +89,12 @@ cat /tmp/basepath-server.log Expected: - exit_code: 0 - regex: server_pid=\d+ -- regex: running at http://.*:19421/skillshare/ +- regex: running at http://.*:49821/skillshare/ ## Step 2: API health with prefix returns 200 ```bash -curl -sf http://localhost:19421/skillshare/api/health +curl -sf http://localhost:49821/skillshare/api/health ``` Expected: @@ -103,7 +104,7 @@ Expected: ## Step 3: API health without prefix returns 404 ```bash -HTTP_CODE=$(curl -s -o /dev/null -w "%{http_code}" http://localhost:19421/api/health) +HTTP_CODE=$(curl -s -o /dev/null -w "%{http_code}" http://localhost:49821/api/health) echo "status=$HTTP_CODE" test "$HTTP_CODE" = "404" && echo "correctly_rejected=yes" ``` @@ -116,8 +117,8 @@ Expected: ## Step 4: Bare path redirects to trailing slash ```bash -HTTP_CODE=$(curl -s -o /dev/null -w "%{http_code}" http://localhost:19421/skillshare) -LOCATION=$(curl -s -o /dev/null -w "%{redirect_url}" http://localhost:19421/skillshare) +HTTP_CODE=$(curl -s -o /dev/null -w "%{http_code}" http://localhost:49821/skillshare) +LOCATION=$(curl -s -o /dev/null -w "%{redirect_url}" http://localhost:49821/skillshare) echo "status=$HTTP_CODE" echo "location=$LOCATION" ``` @@ -130,7 +131,7 @@ Expected: ## Step 5: Root path returns 404 ```bash -HTTP_CODE=$(curl -s -o /dev/null -w "%{http_code}" http://localhost:19421/) +HTTP_CODE=$(curl -s -o /dev/null -w "%{http_code}" http://localhost:49821/) echo "status=$HTTP_CODE" test "$HTTP_CODE" = "404" && echo "root_blocked=yes" ``` @@ -143,7 +144,7 @@ Expected: ## Step 6: Index.html served with __BASE_PATH__ injection ```bash -BODY=$(curl -sf http://localhost:19421/skillshare/) +BODY=$(curl -sf http://localhost:49821/skillshare/) echo "$BODY" echo "$BODY" | grep -q '__BASE_PATH__' && echo "injection_found=yes" echo "$BODY" | grep -q '"/skillshare"' && echo "value_correct=yes" @@ -158,8 +159,8 @@ Expected: ## Step 7: SPA fallback serves index.html for unknown routes ```bash -HTTP_CODE=$(curl -s -o /dev/null -w "%{http_code}" http://localhost:19421/skillshare/skills/nonexistent) -BODY=$(curl -sf http://localhost:19421/skillshare/skills/nonexistent) +HTTP_CODE=$(curl -s -o /dev/null -w "%{http_code}" http://localhost:49821/skillshare/skills/nonexistent) +BODY=$(curl -sf http://localhost:49821/skillshare/skills/nonexistent) echo "status=$HTTP_CODE" echo "$BODY" | grep -q '__BASE_PATH__' && echo "spa_fallback_ok=yes" ``` @@ -172,7 +173,7 @@ Expected: ## Step 8: API overview returns valid data ```bash -curl -sf http://localhost:19421/skillshare/api/overview +curl -sf http://localhost:49821/skillshare/api/overview ``` Expected: @@ -183,7 +184,8 @@ Expected: ## Step 9: Stop server and cleanup ```bash -fuser -k 19421/tcp 2>/dev/null || true +kill $(cat /tmp/basepath-server.pid 2>/dev/null) 2>/dev/null || true +rm -f /tmp/basepath-server.pid sleep 1 echo "server_stopped=yes" ``` @@ -195,8 +197,7 @@ Expected: ## Step 10: --base-path missing value returns error ```bash -cd /workspace -go run ./cmd/skillshare ui --base-path 2>&1 || true +/workspace/bin/skillshare ui --base-path 2>&1 || true ``` Expected: @@ -205,8 +206,7 @@ Expected: ## Step 11: -b short flag missing value returns error ```bash -cd /workspace -go run ./cmd/skillshare ui -b 2>&1 || true +/workspace/bin/skillshare ui -b 2>&1 || true ``` Expected: @@ -223,32 +223,29 @@ export XDG_STATE_HOME="$E2E_HOME/.local/state" export XDG_CACHE_HOME="$E2E_HOME/.cache" export SKILLSHARE_UI_BASE_PATH="/from-env" -# Kill any leftover servers from previous steps or runs -fuser -k 19421/tcp 2>/dev/null || true -fuser -k 19422/tcp 2>/dev/null || true +kill $(cat /tmp/basepath-server.pid /tmp/basepath-server2.pid 2>/dev/null) 2>/dev/null || true +rm -f /tmp/basepath-server.pid /tmp/basepath-server2.pid sleep 1 -cd /workspace -go run ./cmd/skillshare ui --host 0.0.0.0 --port 19422 --no-open -g > /tmp/basepath-env.log 2>&1 & -sleep 5 -cat /tmp/basepath-env.log +/workspace/bin/skillshare ui --host 0.0.0.0 --port 49822 --no-open -g > /tmp/basepath-env.log 2>&1 & +echo $! > /tmp/basepath-server2.pid +sleep 3 +cat /tmp/basepath-env.log >&2 -curl -sf http://localhost:19422/from-env/api/health -fuser -k 19422/tcp 2>/dev/null || true +curl -sf http://localhost:49822/from-env/api/health +kill $(cat /tmp/basepath-server2.pid 2>/dev/null) 2>/dev/null || true ``` Expected: - exit_code: 0 -- regex: running at http://.*:19422/from-env/ - jq: .status == "ok" ## Step 13: Final cleanup ```bash -# Ensure all servers from this runbook are stopped -fuser -k 19421/tcp 2>/dev/null || true -fuser -k 19422/tcp 2>/dev/null || true +kill $(cat /tmp/basepath-server.pid /tmp/basepath-server2.pid 2>/dev/null) 2>/dev/null || true sleep 1 +rm -f /tmp/basepath-server.pid /tmp/basepath-server2.pid rm -rf /tmp/ss-e2e-basepath /tmp/basepath-server.log /tmp/basepath-env.log echo "cleanup_done=yes" ``` From 30f8fb0131152cd9f213afcbf11d3594345c01da Mon Sep 17 00:00:00 2001 From: Willie Date: Tue, 7 Apr 2026 18:27:08 +0800 Subject: [PATCH 053/205] feat(agents): add project-mode backup and restore for agents Add -p/--project support to 'backup agents' and 'restore agents' commands. Project-mode backups are stored under .skillshare/backups/ (parallel to .skillshare/trash/). Backup engine changes: - Add ProjectBackupDir(), CreateInDir(), ListInDir() to internal/backup - Add RestoreLatestInDir(), FindBackupsForTargetInDir(), GetBackupByTimestampInDir() to restore.go - Existing global functions become thin wrappers (zero caller churn) Command layer changes: - Allow project mode through for kind=agents in cmdBackup/cmdRestore - Auto-detect project mode when .skillshare/config.yaml exists - Refactor backup_agents.go with resolveAgentBackupContext() to resolve backup dir + agent targets per mode Skills backup in project mode remains unsupported (error unchanged). --- cmd/skillshare/backup.go | 64 ++++++++++++++--- cmd/skillshare/backup_agents.go | 99 ++++++++++++++++++++------ internal/backup/backup.go | 25 +++++-- internal/backup/restore.go | 29 ++++++-- tests/integration/agent_backup_test.go | 80 ++++++++++++++++++++- 5 files changed, 252 insertions(+), 45 deletions(-) diff --git a/cmd/skillshare/backup.go b/cmd/skillshare/backup.go index 7c056853..387525ff 100644 --- a/cmd/skillshare/backup.go +++ b/cmd/skillshare/backup.go @@ -21,13 +21,21 @@ func cmdBackup(args []string) error { if err != nil { return err } - if mode == modeProject { - return fmt.Errorf("backup is not supported in project mode") - } // Extract kind filter (e.g. "skillshare backup agents"). kind, args := parseKindArg(args) + // Project mode is only supported for agents. + if mode == modeProject && kind != kindAgents { + return fmt.Errorf("backup is not supported in project mode (except for agents)") + } + + cwd, _ := os.Getwd() + if mode == modeAuto && kind == kindAgents && projectConfigExists(cwd) { + mode = modeProject + } + applyModeLabel(mode) + start := time.Now() var targetName string doList := false @@ -67,7 +75,7 @@ func cmdBackup(args []string) error { } if kind == kindAgents { - err = createAgentBackup(targetName, dryRun) + err = createAgentBackup(mode, cwd, targetName, dryRun) } else { err = createBackup(targetName, dryRun) } @@ -329,13 +337,21 @@ func cmdRestore(args []string) error { if err != nil { return err } - if mode == modeProject { - return fmt.Errorf("restore is not supported in project mode") - } // Extract kind filter (e.g. "skillshare restore agents"). kind, args := parseKindArg(args) + // Project mode is only supported for agents. + if mode == modeProject && kind != kindAgents { + return fmt.Errorf("restore is not supported in project mode (except for agents)") + } + + cwd, _ := os.Getwd() + if mode == modeAuto && kind == kindAgents && projectConfigExists(cwd) { + mode = modeProject + } + applyModeLabel(mode) + start := time.Now() _ = start // used below @@ -370,7 +386,7 @@ func cmdRestore(args []string) error { // Agent restore uses agent-specific backup entries (name suffixed with "-agents") if kind == kindAgents { - return restoreAgentBackup(targetName, fromTimestamp, force, dryRun) + return restoreAgentBackup(mode, cwd, targetName, fromTimestamp, force, dryRun) } // No target specified → TUI dispatch (or plain text fallback) @@ -519,6 +535,28 @@ func restoreFromLatest(targetName, targetPath string, opts backup.RestoreOptions return nil } +func restoreFromTimestampInDir(backupDir, targetName, targetPath, timestamp string, opts backup.RestoreOptions) error { + backupInfo, err := backup.GetBackupByTimestampInDir(backupDir, timestamp) + if err != nil { + return err + } + + if err := backup.RestoreToPath(backupInfo.Path, targetName, targetPath, opts); err != nil { + return err + } + ui.Success("Restored %s from backup %s", targetName, timestamp) + return nil +} + +func restoreFromLatestInDir(backupDir, targetName, targetPath string, opts backup.RestoreOptions) error { + timestamp, err := backup.RestoreLatestInDir(backupDir, targetName, targetPath, opts) + if err != nil { + return err + } + ui.Success("Restored %s from latest backup (%s)", targetName, timestamp) + return nil +} + func previewRestoreFromTimestamp(targetName, targetPath, timestamp string, opts backup.RestoreOptions) error { backupInfo, err := backup.GetBackupByTimestamp(timestamp) if err != nil { @@ -562,6 +600,8 @@ Arguments: target Target name to backup (optional; backs up all if omitted) Options: + --project, -p Use project mode (.skillshare/backups/); agents only + --global, -g Use global mode (default for skills) --list, -l List all existing backups --cleanup, -c Remove old backups based on retention policy --dry-run, -n Preview what would be backed up or cleaned up @@ -574,7 +614,8 @@ Examples: skillshare backup --list # List all backups skillshare backup --cleanup # Remove old backups skillshare backup --cleanup --dry-run # Preview cleanup - skillshare backup agents # Backup all agent targets`) + skillshare backup agents # Backup all agent targets + skillshare backup agents -p # Backup project agent targets`) } func printRestoreHelp() { @@ -587,6 +628,8 @@ Arguments: target Target name to restore (optional) Options: + --project, -p Use project mode (.skillshare/backups/); agents only + --global, -g Use global mode (default for skills) --from, -f Restore from specific timestamp (e.g. 2024-01-15_14-30-45) --force Overwrite non-empty target directory --dry-run, -n Preview what would be restored without making changes @@ -599,5 +642,6 @@ Examples: skillshare restore claude --from 2024-01-15_14-30-45 skillshare restore claude --dry-run # Preview restore skillshare restore --no-tui # List backups (no TUI) - skillshare restore agents claude # Restore agents claude target`) + skillshare restore agents claude # Restore agents claude target + skillshare restore agents claude -p # Restore project agents`) } diff --git a/cmd/skillshare/backup_agents.go b/cmd/skillshare/backup_agents.go index e1a3c67c..5881a9e0 100644 --- a/cmd/skillshare/backup_agents.go +++ b/cmd/skillshare/backup_agents.go @@ -2,6 +2,7 @@ package main import ( "fmt" + "path/filepath" "skillshare/internal/backup" "skillshare/internal/config" @@ -10,37 +11,37 @@ import ( // createAgentBackup backs up agent target directories. // Agent backups use "-agents" as the backup entry name. -func createAgentBackup(targetName string, dryRun bool) error { - cfg, err := config.Load() +// In project mode, backups are stored under .skillshare/backups/. +func createAgentBackup(mode runMode, cwd, targetName string, dryRun bool) error { + backupDir, targets, err := resolveAgentBackupContext(mode, cwd) if err != nil { return err } - builtinAgents := config.DefaultAgentTargets() - ui.Header("Creating agent backup") + modeLabel := "global" + if mode == modeProject { + modeLabel = "project" + } + + ui.Header(fmt.Sprintf("Creating agent backup (%s)", modeLabel)) if dryRun { ui.Warning("Dry run mode - no backups will be created") } created := 0 - for name := range cfg.Targets { - if targetName != "" && name != targetName { + for _, at := range targets { + if targetName != "" && at.name != targetName { continue } - agentPath := resolveAgentTargetPath(cfg.Targets[name], builtinAgents, name) - if agentPath == "" { - continue - } - - entryName := name + "-agents" + entryName := at.name + "-agents" if dryRun { - ui.Info("%s: would backup agents from %s", entryName, agentPath) + ui.Info("%s: would backup agents from %s", entryName, at.agentPath) continue } - backupPath, backupErr := backup.Create(entryName, agentPath) + backupPath, backupErr := backup.CreateInDir(backupDir, entryName, at.agentPath) if backupErr != nil { ui.Warning("Failed to backup %s: %v", entryName, backupErr) continue @@ -61,18 +62,24 @@ func createAgentBackup(targetName string, dryRun bool) error { } // restoreAgentBackup restores agent target directories from backup. -func restoreAgentBackup(targetName, fromTimestamp string, force, dryRun bool) error { +func restoreAgentBackup(mode runMode, cwd, targetName, fromTimestamp string, force, dryRun bool) error { if targetName == "" { return fmt.Errorf("usage: skillshare restore agents [--from ] [--force] [--dry-run]") } - cfg, err := config.Load() + backupDir, targets, err := resolveAgentBackupContext(mode, cwd) if err != nil { return err } - builtinAgents := config.DefaultAgentTargets() - agentPath := resolveAgentTargetPath(cfg.Targets[targetName], builtinAgents, targetName) + // Find the target's agent path. + var agentPath string + for _, at := range targets { + if at.name == targetName { + agentPath = at.agentPath + break + } + } if agentPath == "" { return fmt.Errorf("target '%s' has no agent path configured", targetName) } @@ -88,7 +95,59 @@ func restoreAgentBackup(targetName, fromTimestamp string, force, dryRun bool) er opts := backup.RestoreOptions{Force: force} if fromTimestamp != "" { - return restoreFromTimestamp(entryName, agentPath, fromTimestamp, opts) + return restoreFromTimestampInDir(backupDir, entryName, agentPath, fromTimestamp, opts) + } + return restoreFromLatestInDir(backupDir, entryName, agentPath, opts) +} + +// agentTarget holds resolved name + agent path for backup/restore. +type agentTarget struct { + name string + agentPath string +} + +// resolveAgentBackupContext returns the backup directory and agent-capable targets +// for the given mode. +func resolveAgentBackupContext(mode runMode, cwd string) (string, []agentTarget, error) { + if mode == modeProject { + return resolveProjectAgentBackupContext(cwd) + } + return resolveGlobalAgentBackupContext() +} + +func resolveGlobalAgentBackupContext() (string, []agentTarget, error) { + cfg, err := config.Load() + if err != nil { + return "", nil, err + } + + builtinAgents := config.DefaultAgentTargets() + var targets []agentTarget + for name := range cfg.Targets { + agentPath := resolveAgentTargetPath(cfg.Targets[name], builtinAgents, name) + if agentPath != "" { + targets = append(targets, agentTarget{name: name, agentPath: agentPath}) + } } - return restoreFromLatest(entryName, agentPath, opts) + + return backup.BackupDir(), targets, nil +} + +func resolveProjectAgentBackupContext(cwd string) (string, []agentTarget, error) { + projCfg, err := config.LoadProject(cwd) + if err != nil { + return "", nil, fmt.Errorf("cannot load project config: %w", err) + } + + builtinAgents := config.ProjectAgentTargets() + var targets []agentTarget + for _, entry := range projCfg.Targets { + agentPath := resolveProjectAgentTargetPath(entry, builtinAgents, cwd) + if agentPath != "" { + targets = append(targets, agentTarget{name: entry.Name, agentPath: agentPath}) + } + } + + backupDir := filepath.Join(cwd, ".skillshare", "backups") + return backupDir, targets, nil } diff --git a/internal/backup/backup.go b/internal/backup/backup.go index e20f0717..a75cfd4d 100644 --- a/internal/backup/backup.go +++ b/internal/backup/backup.go @@ -11,15 +11,24 @@ import ( "skillshare/internal/config" ) -// BackupDir returns the backup directory path. +// BackupDir returns the global backup directory path. func BackupDir() string { return filepath.Join(config.DataDir(), "backups") } -// Create creates a backup of the target directory -// Returns the backup path +// ProjectBackupDir returns the project-level backup directory path. +func ProjectBackupDir(projectRoot string) string { + return filepath.Join(projectRoot, ".skillshare", "backups") +} + +// Create creates a backup of the target directory using the global backup dir. func Create(targetName, targetPath string) (string, error) { - backupDir := BackupDir() + return CreateInDir(BackupDir(), targetName, targetPath) +} + +// CreateInDir creates a backup of the target directory in the specified backup dir. +// Returns the backup path, or ("", nil) when there is nothing to back up. +func CreateInDir(backupDir, targetName, targetPath string) (string, error) { if backupDir == "" { return "", fmt.Errorf("cannot determine backup directory: home directory not found") } @@ -62,9 +71,13 @@ func Create(targetName, targetPath string) (string, error) { return backupPath, nil } -// List returns all backups sorted by date (newest first) +// List returns all backups from the global backup dir, sorted by date (newest first). func List() ([]BackupInfo, error) { - backupDir := BackupDir() + return ListInDir(BackupDir()) +} + +// ListInDir returns all backups from the specified directory, sorted by date (newest first). +func ListInDir(backupDir string) ([]BackupInfo, error) { if backupDir == "" { return nil, fmt.Errorf("cannot determine backup directory: home directory not found") } diff --git a/internal/backup/restore.go b/internal/backup/restore.go index a83c2ed5..6f56446b 100644 --- a/internal/backup/restore.go +++ b/internal/backup/restore.go @@ -85,10 +85,15 @@ func RestoreToPath(backupPath, targetName, destPath string, opts RestoreOptions) return copyDir(targetBackupPath, destPath) } -// RestoreLatest restores the most recent backup for a target. -// Returns the timestamp of the restored backup. +// RestoreLatest restores the most recent backup for a target from the global backup dir. func RestoreLatest(targetName, destPath string, opts RestoreOptions) (string, error) { - backups, err := List() + return RestoreLatestInDir(BackupDir(), targetName, destPath, opts) +} + +// RestoreLatestInDir restores the most recent backup for a target from the specified dir. +// Returns the timestamp of the restored backup. +func RestoreLatestInDir(backupDir, targetName, destPath string, opts RestoreOptions) (string, error) { + backups, err := ListInDir(backupDir) if err != nil { return "", err } @@ -108,9 +113,14 @@ func RestoreLatest(targetName, destPath string, opts RestoreOptions) (string, er return "", fmt.Errorf("no backup found for target '%s'", targetName) } -// FindBackupsForTarget returns all backups that contain the specified target +// FindBackupsForTarget returns all backups that contain the specified target from the global dir. func FindBackupsForTarget(targetName string) ([]BackupInfo, error) { - allBackups, err := List() + return FindBackupsForTargetInDir(BackupDir(), targetName) +} + +// FindBackupsForTargetInDir returns all backups that contain the specified target. +func FindBackupsForTargetInDir(backupDir, targetName string) ([]BackupInfo, error) { + allBackups, err := ListInDir(backupDir) if err != nil { return nil, err } @@ -128,9 +138,14 @@ func FindBackupsForTarget(targetName string) ([]BackupInfo, error) { return result, nil } -// GetBackupByTimestamp finds a backup by its timestamp +// GetBackupByTimestamp finds a backup by its timestamp from the global dir. func GetBackupByTimestamp(timestamp string) (*BackupInfo, error) { - backups, err := List() + return GetBackupByTimestampInDir(BackupDir(), timestamp) +} + +// GetBackupByTimestampInDir finds a backup by its timestamp in the specified dir. +func GetBackupByTimestampInDir(backupDir, timestamp string) (*BackupInfo, error) { + backups, err := ListInDir(backupDir) if err != nil { return nil, err } diff --git a/tests/integration/agent_backup_test.go b/tests/integration/agent_backup_test.go index 4f19e749..eb562622 100644 --- a/tests/integration/agent_backup_test.go +++ b/tests/integration/agent_backup_test.go @@ -123,13 +123,89 @@ targets: result.AssertOutputNotContains(t, "agent") } -func TestRestore_Agents_ProjectModeRejected(t *testing.T) { +func TestBackup_Agents_ProjectMode_CreatesBackup(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + projectDir := setupProjectWithAgents(t, sb) + + // Sync agents first + result := sb.RunCLIInDir(projectDir, "sync", "-p", "agents") + result.AssertSuccess(t) + + // Backup project agents + result = sb.RunCLIInDir(projectDir, "backup", "-p", "agents") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "agent backup") + + // Verify backup was created under .skillshare/backups/ + backupDir := filepath.Join(projectDir, ".skillshare", "backups") + entries, err := os.ReadDir(backupDir) + if err != nil { + t.Fatalf("expected backup dir at %s: %v", backupDir, err) + } + if len(entries) == 0 { + t.Fatal("expected at least one backup timestamp directory") + } +} + +func TestBackup_Agents_ProjectMode_DryRun(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + projectDir := setupProjectWithAgents(t, sb) + sb.RunCLIInDir(projectDir, "sync", "-p", "agents") + + result := sb.RunCLIInDir(projectDir, "backup", "-p", "agents", "--dry-run") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "Dry run") + + // Backup dir should NOT exist + backupDir := filepath.Join(projectDir, ".skillshare", "backups") + if _, err := os.Stat(backupDir); !os.IsNotExist(err) { + t.Fatal("backup dir should not exist in dry run mode") + } +} + +func TestBackup_Agents_ProjectMode_RestoreRoundTrip(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + projectDir := setupProjectWithAgents(t, sb) + + // Sync → backup + sb.RunCLIInDir(projectDir, "sync", "-p", "agents") + sb.RunCLIInDir(projectDir, "backup", "-p", "agents") + + // Verify agent symlink exists + claudeAgents := filepath.Join(projectDir, ".claude", "agents") + linkPath := filepath.Join(claudeAgents, "tutor.md") + if _, err := os.Lstat(linkPath); err != nil { + t.Fatalf("expected agent symlink at %s", linkPath) + } + + // Delete agent from target + os.Remove(linkPath) + + // Restore + result := sb.RunCLIInDir(projectDir, "restore", "-p", "agents", "claude", "--force") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "Restored") + + // Verify agent file is back (as a regular file from backup, not symlink) + if _, err := os.Stat(linkPath); err != nil { + t.Fatalf("expected restored agent at %s: %v", linkPath, err) + } +} + +func TestRestore_Agents_SkillsProjectModeRejected(t *testing.T) { sb := testutil.NewSandbox(t) defer sb.Cleanup() sb.WriteConfig(`source: ` + sb.SourcePath + "\ntargets: {}\n") - result := sb.RunCLI("restore", "-p", "agents", "claude") + // Skills restore in project mode should still be rejected + result := sb.RunCLI("restore", "-p", "claude") result.AssertFailure(t) result.AssertAnyOutputContains(t, "not supported in project mode") } From be962fd5577b42e9cf266cca1801945b1d5b90b5 Mon Sep 17 00:00:00 2001 From: Willie Date: Tue, 7 Apr 2026 18:48:46 +0800 Subject: [PATCH 054/205] feat(tui): add k:kind filter tag for agent/skill filtering in list --- cmd/skillshare/list_tui.go | 6 +++--- cmd/skillshare/list_tui_filter.go | 22 ++++++++++++++++++++++ 2 files changed, 25 insertions(+), 3 deletions(-) diff --git a/cmd/skillshare/list_tui.go b/cmd/skillshare/list_tui.go index 0ae6a41e..0270075c 100644 --- a/cmd/skillshare/list_tui.go +++ b/cmd/skillshare/list_tui.go @@ -136,7 +136,7 @@ func newListTUIModel(loadFn listLoadFn, skills []skillItem, totalCount int, mode fi.Prompt = "/ " fi.PromptStyle = tc.Filter fi.Cursor.Style = tc.Filter - fi.Placeholder = "filter or t:tracked g:group r:repo" + fi.Placeholder = "filter or t:tracked g:group r:repo k:kind" m := listTUIModel{ list: l, @@ -566,7 +566,7 @@ func (m listTUIModel) viewSplit() string { b.WriteString("\n") helpText := "↑↓ navigate ←→ page / filter Ctrl+d/u detail Enter view A audit U update E enable/disable X uninstall q quit" if m.filtering { - helpText = "t:type g:group r:repo Enter lock Esc clear q quit" + helpText = "t:type g:group r:repo k:kind Enter lock Esc clear q quit" } help := appendScrollInfo(helpText, scrollInfo) b.WriteString(tc.Help.Render(help)) @@ -605,7 +605,7 @@ func (m listTUIModel) viewVertical() string { b.WriteString("\n") helpText := "↑↓ navigate ←→ page / filter Ctrl+d/u detail Enter view A audit U update E enable/disable X uninstall q quit" if m.filtering { - helpText = "t:type g:group r:repo Enter lock Esc clear q quit" + helpText = "t:type g:group r:repo k:kind Enter lock Esc clear q quit" } help := appendScrollInfo(helpText, scrollInfo) b.WriteString(tc.Help.Render(help)) diff --git a/cmd/skillshare/list_tui_filter.go b/cmd/skillshare/list_tui_filter.go index 154da1a4..7ee2daaa 100644 --- a/cmd/skillshare/list_tui_filter.go +++ b/cmd/skillshare/list_tui_filter.go @@ -10,6 +10,7 @@ type filterQuery struct { TypeTag string // "tracked", "remote", "local" (empty = any) GroupTag string // substring match against group segment RepoTag string // substring match against RepoName + KindTag string // "skill" or "agent" (empty = any) FreeText string // remaining text after removing known tags } @@ -34,6 +35,10 @@ func parseFilterQuery(raw string) filterQuery { q.RepoTag = val continue } + if val, ok := cutTag(lower, "k:", "kind:"); ok { + q.KindTag = normalizeKindValue(val) + continue + } freeTokens = append(freeTokens, strings.ToLower(token)) } @@ -69,6 +74,18 @@ func normalizeTypeValue(val string) string { } } +// normalizeKindValue maps plural forms to canonical kind names. +func normalizeKindValue(val string) string { + switch val { + case "agent", "agents": + return "agent" + case "skill", "skills": + return "skill" + default: + return val + } +} + // matchSkillItem returns true if the skill item matches all non-empty conditions in the query (AND logic). func matchSkillItem(item skillItem, q filterQuery) bool { e := item.entry @@ -97,6 +114,11 @@ func matchSkillItem(item skillItem, q filterQuery) bool { } } + // Kind tag — exact match on skill kind + if q.KindTag != "" && e.Kind != q.KindTag { + return false + } + // Free text — substring match on FilterValue if q.FreeText != "" { fv := strings.ToLower(item.FilterValue()) From 3c3139fc608e2c106f00d40330f9cbda8ca42e51 Mon Sep 17 00:00:00 2001 From: Willie Date: Tue, 7 Apr 2026 18:53:17 +0800 Subject: [PATCH 055/205] feat(tui): pass agentsSourcePath and route actions per kind in list TUI --- cmd/skillshare/list.go | 11 ++++++++- cmd/skillshare/list_project.go | 12 +++++++++- cmd/skillshare/list_tui.go | 40 ++++++++++++++++++--------------- cmd/skillshare/list_tui_test.go | 4 ++-- 4 files changed, 45 insertions(+), 22 deletions(-) diff --git a/cmd/skillshare/list.go b/cmd/skillshare/list.go index a38605f2..665a4f8f 100644 --- a/cmd/skillshare/list.go +++ b/cmd/skillshare/list.go @@ -593,7 +593,7 @@ func cmdList(args []string) error { } return listLoadResult{skills: toSkillItems(allEntries), totalCount: total} } - action, skillName, err := runListTUI(loadFn, "global", cfg.Source, cfg.Targets) + action, skillName, skillKind, err := runListTUI(loadFn, "global", cfg.Source, cfg.EffectiveAgentsSource(), cfg.Targets) if err != nil { return err } @@ -606,10 +606,19 @@ func cmdList(args []string) error { ui.Info("No %s installed", resourceLabel) return nil case "audit": + if skillKind == "agent" { + return cmdAudit([]string{"agents", "-g", skillName}) + } return cmdAudit([]string{"-g", skillName}) case "update": + if skillKind == "agent" { + return cmdUpdate([]string{"agents", "-g", skillName}) + } return cmdUpdate([]string{"-g", skillName}) case "uninstall": + if skillKind == "agent" { + return cmdUninstall([]string{"agents", "-g", "--force", skillName}) + } return cmdUninstall([]string{"-g", "--force", skillName}) } return nil diff --git a/cmd/skillshare/list_project.go b/cmd/skillshare/list_project.go index fbc4c512..87d555cc 100644 --- a/cmd/skillshare/list_project.go +++ b/cmd/skillshare/list_project.go @@ -54,7 +54,7 @@ func cmdListProject(root string, opts listOptions, kind resourceKindFilter) erro sortSkillEntries(allEntries, sortBy) return listLoadResult{skills: toSkillItems(allEntries), totalCount: total} } - action, skillName, err := runListTUI(loadFn, "project", skillsSource, targets) + action, skillName, skillKind, err := runListTUI(loadFn, "project", skillsSource, agentsSource, targets) if err != nil { return err } @@ -66,11 +66,21 @@ func cmdListProject(root string, opts listOptions, kind resourceKindFilter) erro } return nil case "audit": + if skillKind == "agent" { + return cmdAudit([]string{"agents", "-p", skillName}) + } return cmdAudit([]string{"-p", skillName}) case "update": + if skillKind == "agent" { + _, updateErr := cmdUpdateProject([]string{"agents", skillName}, root) + return updateErr + } _, updateErr := cmdUpdateProject([]string{skillName}, root) return updateErr case "uninstall": + if skillKind == "agent" { + return cmdUninstallProject([]string{"agents", "--force", skillName}, root) + } return cmdUninstallProject([]string{"--force", skillName}, root) } return nil diff --git a/cmd/skillshare/list_tui.go b/cmd/skillshare/list_tui.go index 0270075c..e6a9da70 100644 --- a/cmd/skillshare/list_tui.go +++ b/cmd/skillshare/list_tui.go @@ -62,9 +62,10 @@ type detailData struct { type listTUIModel struct { list list.Model totalCount int - modeLabel string // "global" or "project" - sourcePath string - targets map[string]config.TargetConfig + modeLabel string // "global" or "project" + sourcePath string + agentsSourcePath string + targets map[string]config.TargetConfig quitting bool action string // "audit", "update", "uninstall", or "" (normal quit) termWidth int @@ -106,7 +107,7 @@ type listTUIModel struct { // newListTUIModel creates a new TUI model. // When loadFn is non-nil, skills are loaded asynchronously inside the TUI (spinner shown). // When loadFn is nil, skills/totalCount are used directly (pre-loaded). -func newListTUIModel(loadFn listLoadFn, skills []skillItem, totalCount int, modeLabel, sourcePath string, targets map[string]config.TargetConfig) listTUIModel { +func newListTUIModel(loadFn listLoadFn, skills []skillItem, totalCount int, modeLabel, sourcePath, agentsSourcePath string, targets map[string]config.TargetConfig) listTUIModel { delegate := listSkillDelegate{} // Build initial item set (empty if async loading) @@ -139,11 +140,12 @@ func newListTUIModel(loadFn listLoadFn, skills []skillItem, totalCount int, mode fi.Placeholder = "filter or t:tracked g:group r:repo k:kind" m := listTUIModel{ - list: l, - totalCount: totalCount, - modeLabel: modeLabel, - sourcePath: sourcePath, - targets: targets, + list: l, + totalCount: totalCount, + modeLabel: modeLabel, + sourcePath: sourcePath, + agentsSourcePath: agentsSourcePath, + targets: targets, detailCache: make(map[string]*detailData), loading: loadFn != nil, loadSpinner: sp, @@ -913,36 +915,38 @@ func (m listTUIModel) findSyncedTargets(e skillEntry) []string { // runListTUI starts the bubbletea TUI for the skill list. // When loadFn is non-nil, data is loaded asynchronously inside the TUI (no blank screen). -// Returns (action, skillName, error). action is "" on normal quit (q/ctrl+c). -func runListTUI(loadFn listLoadFn, modeLabel, sourcePath string, targets map[string]config.TargetConfig) (string, string, error) { - model := newListTUIModel(loadFn, nil, 0, modeLabel, sourcePath, targets) +// Returns (action, skillName, skillKind, error). action is "" on normal quit (q/ctrl+c). +func runListTUI(loadFn listLoadFn, modeLabel, sourcePath, agentsSourcePath string, targets map[string]config.TargetConfig) (string, string, string, error) { + model := newListTUIModel(loadFn, nil, 0, modeLabel, sourcePath, agentsSourcePath, targets) p := tea.NewProgram(model, tea.WithAltScreen(), tea.WithMouseCellMotion()) finalModel, err := p.Run() if err != nil { - return "", "", err + return "", "", "", err } m, ok := finalModel.(listTUIModel) if !ok || m.action == "" { if m.loadErr != nil { - return "", "", m.loadErr + return "", "", "", m.loadErr } if m.emptyResult { - return "empty", "", nil + return "empty", "", "", nil } - return "", "", nil + return "", "", "", nil } - // Extract skill name from selected item + // Extract skill name and kind from selected item var skillName string + var skillKind string if item, ok := m.list.SelectedItem().(skillItem); ok { if item.entry.RelPath != "" { skillName = item.entry.RelPath } else { skillName = item.entry.Name } + skillKind = item.entry.Kind } - return m.action, skillName, nil + return m.action, skillName, skillKind, nil } // wordWrapLines splits text into lines that fit within maxWidth, breaking at word boundaries. diff --git a/cmd/skillshare/list_tui_test.go b/cmd/skillshare/list_tui_test.go index 87dfc98a..a39df093 100644 --- a/cmd/skillshare/list_tui_test.go +++ b/cmd/skillshare/list_tui_test.go @@ -98,7 +98,7 @@ func TestListViewSplit_HeaderKeepsSkillNameWhenDetailScrolled(t *testing.T) { }, } - m := newListTUIModel(nil, items, len(items), "global", t.TempDir(), nil) + m := newListTUIModel(nil, items, len(items), "global", t.TempDir(), "", nil) m.termWidth = 120 m.termHeight = 30 m.detailScroll = 999 @@ -133,7 +133,7 @@ func TestApplyFilter_WithTags(t *testing.T) { {entry: skillEntry{Name: "remote-a", RelPath: "remote-a", Source: "github.com/foo/bar"}}, } - m := newListTUIModel(nil, items, len(items), "global", t.TempDir(), nil) + m := newListTUIModel(nil, items, len(items), "global", t.TempDir(), "", nil) // Filter by type:tracked — should match 2 items m.filterText = "t:tracked" From a941b617f8de777cbe5624c846108d67ccf1d030 Mon Sep 17 00:00:00 2001 From: Willie Date: Tue, 7 Apr 2026 18:54:35 +0800 Subject: [PATCH 056/205] feat(tui): support agent content viewing in list detail panel Agents are single .md files, not directories. The content viewer now: - Detects agent vs skill via contentKind field on the model - Renders agent .md content directly (no directory walk) - Shows a single-file tree entry for the agent - Resolves file paths from agentsSourcePath for agents --- cmd/skillshare/list_tui.go | 1 + cmd/skillshare/list_tui_content.go | 36 +++++++++++++++++++++++++++--- 2 files changed, 34 insertions(+), 3 deletions(-) diff --git a/cmd/skillshare/list_tui.go b/cmd/skillshare/list_tui.go index e6a9da70..b677f816 100644 --- a/cmd/skillshare/list_tui.go +++ b/cmd/skillshare/list_tui.go @@ -97,6 +97,7 @@ type listTUIModel struct { contentScroll int contentText string // current file content (rendered) contentSkillKey string // RelPath of skill being viewed + contentKind string // "skill" or "agent" — set when entering content view termHeight int treeAllNodes []treeNode // complete flat tree (includes collapsed children) treeNodes []treeNode // visible nodes (collapsed children hidden) diff --git a/cmd/skillshare/list_tui_content.go b/cmd/skillshare/list_tui_content.go index 14e723e1..0f1ef440 100644 --- a/cmd/skillshare/list_tui_content.go +++ b/cmd/skillshare/list_tui_content.go @@ -139,12 +139,37 @@ func buildVisibleNodes(all []treeNode) []treeNode { // loadContentForSkill populates the content viewer fields for the given skill. func loadContentForSkill(m *listTUIModel, e skillEntry) { - skillDir := filepath.Join(m.sourcePath, e.RelPath) m.contentSkillKey = e.RelPath + m.contentKind = e.Kind m.contentScroll = 0 m.treeCursor = 0 m.treeScroll = 0 + if e.Kind == "agent" { + // Agents are single .md files — render directly, minimal tree + agentFile := filepath.Join(m.agentsSourcePath, e.RelPath) + data, err := os.ReadFile(agentFile) + if err != nil { + m.contentText = fmt.Sprintf("(error reading agent: %v)", err) + m.treeAllNodes = nil + m.treeNodes = nil + return + } + raw := strings.TrimSpace(string(data)) + if raw == "" { + m.contentText = "(empty)" + } else { + w := m.contentPanelWidth() + m.contentText = hardWrapContent(renderMarkdown(raw, w), w) + } + // Single-file tree: just the agent .md file + m.treeAllNodes = []treeNode{{name: filepath.Base(e.RelPath), relPath: e.RelPath}} + m.treeNodes = m.treeAllNodes + return + } + + // Existing skill directory logic + skillDir := filepath.Join(m.sourcePath, e.RelPath) m.treeAllNodes = buildTreeNodes(skillDir) m.treeNodes = buildVisibleNodes(m.treeAllNodes) @@ -172,8 +197,13 @@ func loadContentFile(m *listTUIModel) { return } - skillDir := filepath.Join(m.sourcePath, m.contentSkillKey) - filePath := filepath.Join(skillDir, node.relPath) + var filePath string + if m.contentKind == "agent" { + filePath = filepath.Join(m.agentsSourcePath, node.relPath) + } else { + skillDir := filepath.Join(m.sourcePath, m.contentSkillKey) + filePath = filepath.Join(skillDir, node.relPath) + } var rawText string if node.name == "SKILL.md" { From dc4ef279c0cdee85c593d65b93e71ea9ecf8965d Mon Sep 17 00:00:00 2001 From: Willie Date: Tue, 7 Apr 2026 18:59:35 +0800 Subject: [PATCH 057/205] feat(tui): add agent badge and correct restore routing in trash TUI --- cmd/skillshare/backup.go | 26 ++++++++++-- cmd/skillshare/trash.go | 56 ++++++++++++++++++++----- cmd/skillshare/trash_tui.go | 82 +++++++++++++++++++++++++------------ internal/trash/trash.go | 1 + 4 files changed, 124 insertions(+), 41 deletions(-) diff --git a/cmd/skillshare/backup.go b/cmd/skillshare/backup.go index 387525ff..c432a023 100644 --- a/cmd/skillshare/backup.go +++ b/cmd/skillshare/backup.go @@ -4,6 +4,7 @@ import ( "fmt" "os" "path/filepath" + "sort" "strings" "time" @@ -492,12 +493,27 @@ func restoreTUIDispatch(noTUI bool) error { if projectConfigExists(cwd) { mode = modeProject } - trashBase := resolveTrashBase(mode, cwd, kindSkills) - items := trash.List(trashBase) + skillTrashBase := resolveTrashBase(mode, cwd, kindSkills) + agentTrashBase := resolveTrashBase(mode, cwd, kindAgents) + + // Merge skill + agent trash + var items []trash.TrashEntry + for _, e := range trash.List(skillTrashBase) { + e.Kind = "skill" + items = append(items, e) + } + for _, e := range trash.List(agentTrashBase) { + e.Kind = "agent" + items = append(items, e) + } if len(items) == 0 { ui.Info("Trash is empty") return nil } + sort.Slice(items, func(i, j int) bool { + return items[i].Date.After(items[j].Date) + }) + modeLabel := "global" if mode == modeProject { modeLabel = "project" @@ -507,7 +523,11 @@ func restoreTUIDispatch(noTUI bool) error { if err != nil { return err } - return runTrashTUI(items, trashBase, destDir, cfgPath, modeLabel) + agentDestDir, err := resolveSourceDir(mode, cwd, kindAgents) + if err != nil { + return err + } + return runTrashTUI(items, skillTrashBase, agentTrashBase, destDir, agentDestDir, cfgPath, modeLabel) } return nil diff --git a/cmd/skillshare/trash.go b/cmd/skillshare/trash.go index 04dc2bc1..9d0d7ce2 100644 --- a/cmd/skillshare/trash.go +++ b/cmd/skillshare/trash.go @@ -3,6 +3,7 @@ package main import ( "fmt" "os" + "sort" "strings" "time" @@ -74,29 +75,62 @@ func cmdTrash(args []string) error { } func trashList(mode runMode, cwd string, noTUI bool, kind resourceKindFilter) error { - trashBase := resolveTrashBase(mode, cwd, kind) - items := trash.List(trashBase) + // TUI path: merge skill + agent trash when kind includes both + if shouldLaunchTUI(noTUI, nil) { + var items []trash.TrashEntry - if len(items) == 0 { - ui.Info("Trash is empty") - return nil - } + if kind.IncludesSkills() { + skillBase := resolveTrashBase(mode, cwd, kindSkills) + for _, e := range trash.List(skillBase) { + e.Kind = "skill" + items = append(items, e) + } + } + if kind.IncludesAgents() { + agentBase := resolveTrashBase(mode, cwd, kindAgents) + for _, e := range trash.List(agentBase) { + e.Kind = "agent" + items = append(items, e) + } + } + + if len(items) == 0 { + ui.Info("Trash is empty") + return nil + } + + // Sort merged list by date (newest first) + sort.Slice(items, func(i, j int) bool { + return items[i].Date.After(items[j].Date) + }) - // TUI dispatch: TTY + items + TUI enabled - if shouldLaunchTUI(noTUI, nil) { modeLabel := "global" if mode == modeProject { modeLabel = "project" } + skillTrashBase := resolveTrashBase(mode, cwd, kindSkills) + agentTrashBase := resolveTrashBase(mode, cwd, kindAgents) cfgPath := resolveTrashCfgPath(mode, cwd) - destDir, err := resolveSourceDir(mode, cwd, kind) + destDir, err := resolveSourceDir(mode, cwd, kindSkills) if err != nil { return err } - return runTrashTUI(items, trashBase, destDir, cfgPath, modeLabel) + agentDestDir, err := resolveSourceDir(mode, cwd, kindAgents) + if err != nil { + return err + } + return runTrashTUI(items, skillTrashBase, agentTrashBase, destDir, agentDestDir, cfgPath, modeLabel) + } + + // Plain text path (unchanged) — list single kind + trashBase := resolveTrashBase(mode, cwd, kind) + items := trash.List(trashBase) + + if len(items) == 0 { + ui.Info("Trash is empty") + return nil } - // Plain text output (--no-tui or non-TTY) ui.Header("Trash") for _, item := range items { age := time.Since(item.Date) diff --git a/cmd/skillshare/trash_tui.go b/cmd/skillshare/trash_tui.go index aadd4845..6e1dc6e4 100644 --- a/cmd/skillshare/trash_tui.go +++ b/cmd/skillshare/trash_tui.go @@ -4,6 +4,7 @@ import ( "fmt" "os" "path/filepath" + "sort" "strings" "time" @@ -33,9 +34,13 @@ func (i trashItem) Title() string { if i.selected { check = "[x]" } + var kindBadge string + if i.entry.Kind == "agent" { + kindBadge = tc.Cyan.Render("[A]") + " " + } age := formatAge(time.Since(i.entry.Date)) size := formatBytes(i.entry.Size) - return fmt.Sprintf("%s %s (%s, %s ago)", check, i.entry.Name, size, age) + return fmt.Sprintf("%s %s%s (%s, %s ago)", check, kindBadge, i.entry.Name, size, age) } func (i trashItem) Description() string { return "" } @@ -51,14 +56,16 @@ type trashOpDoneMsg struct { // trashTUIModel is the bubbletea model for the interactive trash viewer. type trashTUIModel struct { - list list.Model - modeLabel string // "global" or "project" - trashBase string - destDir string - cfgPath string - quitting bool - termWidth int - termHeight int + list list.Model + modeLabel string // "global" or "project" + skillTrashBase string // for reload after operations + agentTrashBase string // for reload after operations + destDir string // skill restore destination + agentDestDir string // agent restore destination + cfgPath string + quitting bool + termWidth int + termHeight int // All items (source of truth for filter + selection) allItems []trashItem @@ -90,7 +97,7 @@ type trashTUIModel struct { detailScroll int } -func newTrashTUIModel(items []trash.TrashEntry, trashBase, destDir, cfgPath, modeLabel string) trashTUIModel { +func newTrashTUIModel(items []trash.TrashEntry, skillTrashBase, agentTrashBase, destDir, agentDestDir, cfgPath, modeLabel string) trashTUIModel { allItems := make([]trashItem, len(items)) listItems := make([]list.Item, len(items)) for i, entry := range items { @@ -119,16 +126,18 @@ func newTrashTUIModel(items []trash.TrashEntry, trashBase, destDir, cfgPath, mod fi.Cursor.Style = tc.Filter return trashTUIModel{ - list: l, - modeLabel: modeLabel, - trashBase: trashBase, - destDir: destDir, - cfgPath: cfgPath, - allItems: allItems, - matchCount: len(allItems), - filterInput: fi, - selected: make(map[int]bool), - opSpinner: sp, + list: l, + modeLabel: modeLabel, + skillTrashBase: skillTrashBase, + agentTrashBase: agentTrashBase, + destDir: destDir, + agentDestDir: agentDestDir, + cfgPath: cfgPath, + allItems: allItems, + matchCount: len(allItems), + filterInput: fi, + selected: make(map[int]bool), + opSpinner: sp, } } @@ -492,9 +501,11 @@ func (m trashTUIModel) startOperation() (tea.Model, tea.Cmd) { } else { entries = m.selectedEntries() } - trashBase := m.trashBase destDir := m.destDir + agentDestDir := m.agentDestDir cfgPath := m.cfgPath + skillTrashBase := m.skillTrashBase + agentTrashBase := m.agentTrashBase cmd := func() tea.Msg { start := time.Now() @@ -505,8 +516,14 @@ func (m trashTUIModel) startOperation() (tea.Model, tea.Cmd) { case "restore": for _, entry := range entries { e := entry // copy for closure - if err := trash.Restore(&e, destDir); err != nil { - errMsgs = append(errMsgs, fmt.Sprintf("%s: %s", entry.Name, err)) + var restoreErr error + if e.Kind == "agent" { + restoreErr = trash.RestoreAgent(&e, agentDestDir) + } else { + restoreErr = trash.Restore(&e, destDir) + } + if restoreErr != nil { + errMsgs = append(errMsgs, fmt.Sprintf("%s: %s", entry.Name, restoreErr)) continue // don't stop — process remaining items } count++ @@ -530,8 +547,19 @@ func (m trashTUIModel) startOperation() (tea.Model, tea.Cmd) { // Log the operation logTrashOp(cfgPath, action, count, "", start, opErr) - // Reload items from disk - reloaded := trash.List(trashBase) + // Reload items from disk — merge skill + agent trash + var reloaded []trash.TrashEntry + for _, e := range trash.List(skillTrashBase) { + e.Kind = "skill" + reloaded = append(reloaded, e) + } + for _, e := range trash.List(agentTrashBase) { + e.Kind = "agent" + reloaded = append(reloaded, e) + } + sort.Slice(reloaded, func(i, j int) bool { + return reloaded[i].Date.After(reloaded[j].Date) + }) return trashOpDoneMsg{ action: action, count: count, @@ -799,8 +827,8 @@ func (m trashTUIModel) renderTrashDetailPanel(entry trash.TrashEntry, width int) // --------------------------------------------------------------------------- // runTrashTUI starts the bubbletea TUI for the trash viewer. -func runTrashTUI(items []trash.TrashEntry, trashBase, destDir, cfgPath, modeLabel string) error { - model := newTrashTUIModel(items, trashBase, destDir, cfgPath, modeLabel) +func runTrashTUI(items []trash.TrashEntry, skillTrashBase, agentTrashBase, destDir, agentDestDir, cfgPath, modeLabel string) error { + model := newTrashTUIModel(items, skillTrashBase, agentTrashBase, destDir, agentDestDir, cfgPath, modeLabel) p := tea.NewProgram(model, tea.WithAltScreen(), tea.WithMouseCellMotion()) _, err := p.Run() return err diff --git a/internal/trash/trash.go b/internal/trash/trash.go index bb979209..b9956dd7 100644 --- a/internal/trash/trash.go +++ b/internal/trash/trash.go @@ -79,6 +79,7 @@ type TrashEntry struct { Path string // Full path to trashed directory Date time.Time // Parsed or stat-based date Size int64 // Total size in bytes + Kind string // "skill" or "agent" — set by caller } // MoveToTrash moves a skill directory to the trash. From 11aebd2876f59efe41e434df8dd17ab26adaee7b Mon Sep 17 00:00:00 2001 From: Willie Date: Tue, 7 Apr 2026 19:02:56 +0800 Subject: [PATCH 058/205] feat(tui): show agent badge and route agent restore in backup restore TUI Add [A] badge to agent backup entries in the target list, and resolve the correct agent target path when restoring. Agent backups (entries ending with -agents) now look up AgentsConfig or fall back to builtin DefaultAgentTargets for path resolution, both in the restore action and in the detail/diff panels. --- cmd/skillshare/restore_tui.go | 85 +++++++++++++++++++++++++++++++---- 1 file changed, 76 insertions(+), 9 deletions(-) diff --git a/cmd/skillshare/restore_tui.go b/cmd/skillshare/restore_tui.go index 322f2ad6..015f47d0 100644 --- a/cmd/skillshare/restore_tui.go +++ b/cmd/skillshare/restore_tui.go @@ -24,6 +24,16 @@ import ( // Left-right split layout: list on left, detail panel on right. // --------------------------------------------------------------------------- +// isAgentBackupEntry returns true if the backup entry name represents an agent backup. +func isAgentBackupEntry(name string) bool { + return strings.HasSuffix(name, "-agents") +} + +// agentBaseTarget returns the base target name by stripping the "-agents" suffix. +func agentBaseTarget(name string) string { + return strings.TrimSuffix(name, "-agents") +} + // restorePhase tracks which screen is active. type restorePhase int @@ -45,7 +55,11 @@ type restoreTargetItem struct { } func (i restoreTargetItem) Title() string { - return i.summary.TargetName + name := i.summary.TargetName + if isAgentBackupEntry(name) { + return tc.Cyan.Render("[A]") + " " + agentBaseTarget(name) + } + return name } func (i restoreTargetItem) Description() string { return fmt.Sprintf("%d backup(s), latest: %s", @@ -462,14 +476,34 @@ func (m restoreTUIModel) startRestore() (tea.Model, tea.Cmd) { cmd := func() tea.Msg { start := time.Now() - targetCfg, ok := targets[targetName] - if !ok { + + // Resolve destination path — agent backups restore to the agent directory. + var destPath string + if isAgentBackupEntry(targetName) { + baseName := agentBaseTarget(targetName) + if tc, ok := targets[baseName]; ok { + if ac := tc.AgentsConfig(); ac.Path != "" { + destPath = config.ExpandPath(ac.Path) + } + } + if destPath == "" { + builtinAgents := config.DefaultAgentTargets() + if bt, ok := builtinAgents[baseName]; ok { + destPath = config.ExpandPath(bt.Path) + } + } + } else { + if tc, ok := targets[targetName]; ok { + destPath = tc.SkillsConfig().Path + } + } + if destPath == "" { return restoreDoneMsg{err: fmt.Errorf("target '%s' not found in config", targetName)} } backupPath := filepath.Dir(version.Dir) opts := backup.RestoreOptions{Force: true} - err := backup.RestoreToPath(backupPath, targetName, targetCfg.SkillsConfig().Path, opts) + err := backup.RestoreToPath(backupPath, targetName, destPath, opts) e := oplog.NewEntry("restore", statusFromErr(err), time.Since(start)) e.Args = map[string]any{"target": targetName, "from": version.Label, "via": "tui"} @@ -845,8 +879,25 @@ func (m restoreTUIModel) renderTargetDetail(s backup.TargetBackupSummary) string row("Target: ", s.TargetName) - // Target path and current state - if t, ok := m.targets[s.TargetName]; ok { + // Target path and current state — agent entries resolve via AgentsConfig or builtin defaults. + if isAgentBackupEntry(s.TargetName) { + baseName := agentBaseTarget(s.TargetName) + var agentPath string + if t, ok := m.targets[baseName]; ok { + if ac := t.AgentsConfig(); ac.Path != "" { + agentPath = config.ExpandPath(ac.Path) + } + } + if agentPath == "" { + if bt, ok := config.DefaultAgentTargets()[baseName]; ok { + agentPath = config.ExpandPath(bt.Path) + } + } + if agentPath != "" { + row("Path: ", agentPath) + row("Status: ", describeTargetState(agentPath)) + } + } else if t, ok := m.targets[s.TargetName]; ok { sc := t.SkillsConfig() row("Path: ", sc.Path) if sc.Mode != "" { @@ -920,9 +971,25 @@ func (m restoreTUIModel) renderVersionDetail(v backup.BackupVersion) string { row("Size: ", "calculating...") } - // Diff with current target - if t, ok := m.targets[m.selectedTarget]; ok { - added, removed, common := diffSkillSets(v.SkillNames, listDirNames(t.SkillsConfig().Path)) + // Diff with current target — resolve agent path for agent backup entries. + var diffPath string + if isAgentBackupEntry(m.selectedTarget) { + baseName := agentBaseTarget(m.selectedTarget) + if t, ok := m.targets[baseName]; ok { + if ac := t.AgentsConfig(); ac.Path != "" { + diffPath = config.ExpandPath(ac.Path) + } + } + if diffPath == "" { + if bt, ok := config.DefaultAgentTargets()[baseName]; ok { + diffPath = config.ExpandPath(bt.Path) + } + } + } else if t, ok := m.targets[m.selectedTarget]; ok { + diffPath = t.SkillsConfig().Path + } + if diffPath != "" { + added, removed, common := diffSkillSets(v.SkillNames, listDirNames(diffPath)) if len(added) > 0 || len(removed) > 0 { b.WriteString("\n") b.WriteString(tc.Separator.Render("── Diff vs current target ────────────")) From 98bd7d7fc245241d89d41639c7c46f7d432879aa Mon Sep 17 00:00:00 2001 From: Willie Date: Tue, 7 Apr 2026 19:04:35 +0800 Subject: [PATCH 059/205] feat(tui): show agent diffs with [A] badge in diff TUI MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Merge agent diffs into skill diff results so both appear together in the diff TUI. Agent entries display a cyan [A] badge prefix in the detail panel's category listings. Changes: - Add mergeAgentDiffsGlobal/mergeAgentDiffsProject helpers that compute agent diffs and merge them into per-target skill results - Call merge helpers in cmdDiffGlobal and cmdDiffProject before TUI/output rendering - Build agent name set in buildDiffDetail and render [A] badge for agent entries in category expansion - Fix categorizeItems to handle agent-specific reasons ('not in target' → New, 'local file' → Local Only) --- cmd/skillshare/diff.go | 7 ++- cmd/skillshare/diff_agents.go | 88 ++++++++++++++++++++++++++++++++++ cmd/skillshare/diff_project.go | 3 ++ cmd/skillshare/diff_tui.go | 14 +++++- 4 files changed, 109 insertions(+), 3 deletions(-) diff --git a/cmd/skillshare/diff.go b/cmd/skillshare/diff.go index 86b1677d..7dea90c2 100644 --- a/cmd/skillshare/diff.go +++ b/cmd/skillshare/diff.go @@ -479,6 +479,9 @@ func cmdDiffGlobal(targetName string, kind resourceKindFilter, opts diffRenderOp }) } + // Merge agent diffs into skill results so they appear together + results = mergeAgentDiffsGlobal(cfg, results, targetName) + if opts.jsonOutput { return diffOutputJSONWithExtras(results, extrasResults, start) } @@ -818,7 +821,7 @@ func categorizeItems(items []copyDiffEntry) []actionCategory { for _, item := range items { switch { - case item.reason == "source only": + case item.reason == "source only" || item.reason == "not in target": add("new", "new", "New", item.name) case item.reason == "deleted from target": add("restore", "new", "Restore", item.name) @@ -828,7 +831,7 @@ func categorizeItems(items []copyDiffEntry) []actionCategory { add("override", "override", "Local Override", item.name) case strings.Contains(item.reason, "orphan"): add("orphan", "orphan", "Orphan", item.name) - case item.reason == "local only" || item.reason == "not in source": + case item.reason == "local only" || item.reason == "not in source" || item.reason == "local file": add("local", "local", "Local Only", item.name) default: add("warn", "warn", item.reason, item.name) diff --git a/cmd/skillshare/diff_agents.go b/cmd/skillshare/diff_agents.go index 21669704..3a6641e8 100644 --- a/cmd/skillshare/diff_agents.go +++ b/cmd/skillshare/diff_agents.go @@ -89,6 +89,94 @@ func diffGlobalAgents(cfg *config.Config, targetName string, opts diffRenderOpts return nil } +// mergeAgentDiffsGlobal computes agent diffs for all targets and merges them +// into existing skill diff results. Targets with agent diffs get their items +// appended; targets without a skill result get a new entry. +func mergeAgentDiffsGlobal(cfg *config.Config, results []targetDiffResult, targetName string) []targetDiffResult { + agentsSource := cfg.EffectiveAgentsSource() + agents, _ := resource.AgentKind{}.Discover(agentsSource) + if len(agents) == 0 { + return results + } + + builtinAgents := config.DefaultAgentTargets() + var agentResults []targetDiffResult + for name := range cfg.Targets { + if targetName != "" && name != targetName { + continue + } + agentPath := resolveAgentTargetPath(cfg.Targets[name], builtinAgents, name) + if agentPath == "" { + continue + } + agentResults = append(agentResults, computeAgentDiff(name, agentPath, agents)) + } + + return mergeAgentResults(results, agentResults) +} + +// mergeAgentDiffsProject computes agent diffs for project targets and merges +// them into existing skill diff results. +func mergeAgentDiffsProject(root string, results []targetDiffResult, targetName string) []targetDiffResult { + if !projectConfigExists(root) { + return results + } + rt, err := loadProjectRuntime(root) + if err != nil { + return results + } + + agentsSource := rt.agentsSourcePath + agents, _ := resource.AgentKind{}.Discover(agentsSource) + if len(agents) == 0 { + return results + } + + builtinAgents := config.ProjectAgentTargets() + var agentResults []targetDiffResult + for _, entry := range rt.config.Targets { + if targetName != "" && entry.Name != targetName { + continue + } + agentPath := resolveProjectAgentTargetPath(entry, builtinAgents, root) + if agentPath == "" { + continue + } + agentResults = append(agentResults, computeAgentDiff(entry.Name, agentPath, agents)) + } + + return mergeAgentResults(results, agentResults) +} + +// mergeAgentResults merges agent diff results into skill results by target name. +func mergeAgentResults(skillResults, agentResults []targetDiffResult) []targetDiffResult { + if len(agentResults) == 0 { + return skillResults + } + + idx := make(map[string]int, len(skillResults)) + for i, r := range skillResults { + idx[r.name] = i + } + + for _, ar := range agentResults { + if len(ar.items) == 0 { + continue + } + if i, ok := idx[ar.name]; ok { + skillResults[i].items = append(skillResults[i].items, ar.items...) + skillResults[i].syncCount += ar.syncCount + skillResults[i].localCount += ar.localCount + if !ar.synced { + skillResults[i].synced = false + } + } else { + skillResults = append(skillResults, ar) + } + } + return skillResults +} + // computeAgentDiff compares source agents against a target directory. func computeAgentDiff(targetName, targetDir string, agents []resource.DiscoveredResource) targetDiffResult { r := targetDiffResult{ diff --git a/cmd/skillshare/diff_project.go b/cmd/skillshare/diff_project.go index 84e5d4e2..11fcc272 100644 --- a/cmd/skillshare/diff_project.go +++ b/cmd/skillshare/diff_project.go @@ -125,6 +125,9 @@ func cmdDiffProject(root, targetName string, kind resourceKindFilter, opts diffR }) } + // Merge agent diffs into skill results so they appear together + results = mergeAgentDiffsProject(root, results, targetName) + if opts.jsonOutput { return diffOutputJSONWithExtras(results, extrasResults, start) } diff --git a/cmd/skillshare/diff_tui.go b/cmd/skillshare/diff_tui.go index 96da1ceb..ea0ae6ec 100644 --- a/cmd/skillshare/diff_tui.go +++ b/cmd/skillshare/diff_tui.go @@ -695,6 +695,14 @@ func (m diffTUIModel) buildDiffDetail() string { return b.String() } + // Build agent name set for [A] badge rendering + agentNames := make(map[string]bool, len(m.cachedItems)) + for _, item := range m.cachedItems { + if item.kind == "agent" { + agentNames[item.name] = true + } + } + // Use cached sorted categories (refreshed on selection change) cats := m.cachedCats for _, cat := range cats { @@ -728,7 +736,11 @@ func (m diffTUIModel) buildDiffDetail() string { if cat.expand { for _, name := range cat.names { - b.WriteString(tc.Dim.Render(" " + name)) + if agentNames[name] { + b.WriteString(" " + tc.Cyan.Render("[A]") + " " + tc.Dim.Render(name)) + } else { + b.WriteString(tc.Dim.Render(" " + name)) + } b.WriteString("\n") } } From 031306af62e07004e746464c8c48eda13179b4a4 Mon Sep 17 00:00:00 2001 From: Willie Date: Tue, 7 Apr 2026 19:07:21 +0800 Subject: [PATCH 060/205] test(tui): add integration tests for agent TUI features - TestList_Agents_KindFilter_NoTUI: verifies list agents/all/default kind filtering works correctly in --no-tui mode - TestTrash_MergedList_IncludesAgents: verifies uninstalled agents appear in trash agents list --no-tui output --- tests/integration/agent_tui_test.go | 58 +++++++++++++++++++++++++++++ 1 file changed, 58 insertions(+) create mode 100644 tests/integration/agent_tui_test.go diff --git a/tests/integration/agent_tui_test.go b/tests/integration/agent_tui_test.go new file mode 100644 index 00000000..3cff2705 --- /dev/null +++ b/tests/integration/agent_tui_test.go @@ -0,0 +1,58 @@ +//go:build !online + +package integration + +import ( + "testing" + + "skillshare/internal/testutil" +) + +func TestList_Agents_KindFilter_NoTUI(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + createAgentSource(t, sb, map[string]string{ + "tutor.md": "# Tutor agent", + }) + sb.CreateSkill("my-skill", map[string]string{ + "SKILL.md": "---\nname: my-skill\n---\n# Content", + }) + sb.WriteConfig(`source: ` + sb.SourcePath + "\ntargets: {}\n") + + // list agents --no-tui should show only agents + result := sb.RunCLI("list", "agents", "--no-tui") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "tutor") + result.AssertOutputNotContains(t, "my-skill") + + // list all --no-tui should show both + result = sb.RunCLI("list", "all", "--no-tui") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "tutor") + result.AssertAnyOutputContains(t, "my-skill") + + // list (default) --no-tui should show only skills + result = sb.RunCLI("list", "--no-tui") + result.AssertSuccess(t) + result.AssertOutputNotContains(t, "tutor") + result.AssertAnyOutputContains(t, "my-skill") +} + +func TestTrash_MergedList_IncludesAgents(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + createAgentSource(t, sb, map[string]string{ + "tutor.md": "# Tutor", + }) + sb.WriteConfig(`source: ` + sb.SourcePath + "\ntargets: {}\n") + + // Uninstall agent to move to trash + sb.RunCLI("uninstall", "agents", "tutor", "--force") + + // Trash agents list --no-tui should show the agent + result := sb.RunCLI("trash", "agents", "list", "--no-tui") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "tutor") +} From 5f7d9adad364aa055aa4e5124d40bf845defc207 Mon Sep 17 00:00:00 2001 From: Willie Date: Tue, 7 Apr 2026 19:14:43 +0800 Subject: [PATCH 061/205] =?UTF-8?q?fix(ui):=20resolve=20TypeScript=20error?= =?UTF-8?q?s=20=E2=80=94=20add=20ResourceTab=20type,=20useSearchParams=20i?= =?UTF-8?q?mport,=20tab=20filtering?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ui/src/components/Layout.tsx | 1 - ui/src/pages/ResourcesPage.tsx | 19 ++++++++++++------- 2 files changed, 12 insertions(+), 8 deletions(-) diff --git a/ui/src/components/Layout.tsx b/ui/src/components/Layout.tsx index 558b7588..c5782b51 100644 --- a/ui/src/components/Layout.tsx +++ b/ui/src/components/Layout.tsx @@ -2,7 +2,6 @@ import { NavLink, Outlet, useNavigate, useLocation } from 'react-router-dom'; import { useState, useCallback, useEffect } from 'react'; import { LayoutDashboard, - Puzzle, Layers, Target, FolderPlus, diff --git a/ui/src/pages/ResourcesPage.tsx b/ui/src/pages/ResourcesPage.tsx index 784ca98b..cc6d5286 100644 --- a/ui/src/pages/ResourcesPage.tsx +++ b/ui/src/pages/ResourcesPage.tsx @@ -1,5 +1,5 @@ import { useState, useMemo, useCallback, useEffect, forwardRef, memo, type ReactElement } from 'react'; -import { Link, useNavigate } from 'react-router-dom'; +import { Link, useNavigate, useSearchParams } from 'react-router-dom'; import { Search, GitBranch, @@ -468,6 +468,7 @@ function saveCollapsed(collapsed: Set) { /* -- Filter, Sort & View types -------------------- */ +type ResourceTab = 'skills' | 'agents'; type FilterType = 'all' | 'skills' | 'agents' | 'tracked' | 'github' | 'local'; type SortType = 'name-asc' | 'name-desc' | 'newest' | 'oldest'; type ViewType = 'grid' | 'grouped' | 'table'; @@ -791,6 +792,10 @@ export default function SkillsPage() { return sortSkills(result, sortType); }, [skills, search, filterType, sortType]); + const skillItems = useMemo(() => filtered.filter((s) => s.kind !== 'agent'), [filtered]); + const agentItems = useMemo(() => filtered.filter((s) => s.kind === 'agent'), [filtered]); + const tabFiltered = activeTab === 'agents' ? agentItems : skillItems; + if (isPending) return ; if (error) { return ( @@ -915,7 +920,7 @@ export default function SkillsPage() { {/* Result count — hidden in folder view (merged into folder toolbar) */} {(filterType !== 'all' || search) && viewType !== 'grouped' && (

- Showing {filtered.length} of {skills.length} skills + Showing {tabFiltered.length} of {skills.length} resources {filterType !== 'all' && ( <> {' '} @@ -938,11 +943,11 @@ export default function SkillsPage() { {/* Skills grid / grouped / table view */} - {filtered.length > 0 ? ( + {tabFiltered.length > 0 ? ( viewType === 'grid' ? ( Math.abs(velocity) < 200, }} itemContent={(index) => { - const skill = filtered[index]; + const skill = tabFiltered[index]; return ( ) : viewType === 'grouped' ? ( { setFilterType('all'); setSearch(''); } : undefined} /> ) : ( - + ) ) : ( Date: Tue, 7 Apr 2026 19:35:16 +0800 Subject: [PATCH 062/205] feat(install): detect and install agents from pure-agent repos When a repo has no SKILL.md files but contains .md agents (discovered via Rule 4 fallback in discovery layer), the CLI now installs them instead of showing 'No skills found'. Adds handleAgentInstall() which: - Respects --agent name filter - Uses InstallAgentFromDiscovery() (same as UI flow) - Supports --dry-run and --force - Shows next-steps hint to run 'sync agents' --- cmd/skillshare/install_handlers.go | 66 ++++++++++++++++++++++++++++-- 1 file changed, 63 insertions(+), 3 deletions(-) diff --git a/cmd/skillshare/install_handlers.go b/cmd/skillshare/install_handlers.go index e7ee9e0c..dee0af51 100644 --- a/cmd/skillshare/install_handlers.go +++ b/cmd/skillshare/install_handlers.go @@ -221,12 +221,19 @@ func handleGitInstall(source *install.Source, cfg *config.Config, opts install.I return logSummary, nil } - // Step 3: Show found skills - if len(discovery.Skills) == 0 { - ui.StepEnd("Found", "No skills (no SKILL.md files)") + // Step 3: Show found resources + if len(discovery.Skills) == 0 && len(discovery.Agents) == 0 { + ui.StepEnd("Found", "No skills or agents found") return logSummary, nil } + // Pure agent repo — no skills, only agents + if len(discovery.Skills) == 0 && len(discovery.Agents) > 0 { + ui.StepEnd("Found", fmt.Sprintf("%d agent(s)", len(discovery.Agents))) + agentsDir := cfg.EffectiveAgentsSource() + return handleAgentInstall(discovery, agentsDir, opts, logSummary) + } + ui.StepEnd("Found", fmt.Sprintf("%d skill(s)", len(discovery.Skills))) // Apply --exclude early so excluded skills never appear in prompts @@ -1011,3 +1018,56 @@ func truncateDesc(s string, max int) string { } return string(runes[:max]) + " ..." } + +// handleAgentInstall installs agents from a pure-agent repo (no SKILL.md files found). +func handleAgentInstall(discovery *install.DiscoveryResult, agentsDir string, opts install.InstallOptions, logSummary installLogSummary) (installLogSummary, error) { + agents := discovery.Agents + + // Apply --agent name filter if specified + if len(opts.AgentNames) > 0 { + nameSet := make(map[string]bool, len(opts.AgentNames)) + for _, n := range opts.AgentNames { + nameSet[n] = true + } + var filtered []install.AgentInfo + for _, a := range agents { + if nameSet[a.Name] { + filtered = append(filtered, a) + } + } + agents = filtered + if len(agents) == 0 { + ui.Info("No matching agents found") + return logSummary, nil + } + } + + installed := 0 + for _, agent := range agents { + spinner := ui.StartSpinner(fmt.Sprintf("Installing agent %s...", agent.Name)) + result, err := install.InstallAgentFromDiscovery(discovery, agent, agentsDir, opts) + spinner.Stop() + if err != nil { + ui.Warning("Failed to install agent %s: %v", agent.Name, err) + continue + } + if result.Action == "skipped" { + ui.StepSkip(agent.Name, strings.Join(result.Warnings, "; ")) + } else if opts.DryRun { + ui.Warning("[dry-run] Would install agent: %s", agent.Name) + } else { + ui.StepDone(agent.Name, agentsDir) + installed++ + } + } + + if installed > 0 { + fmt.Println() + ui.SuccessMsg("Installed %d agent(s) to %s", installed, agentsDir) + ui.SectionLabel("Next Steps") + ui.Info("Run 'skillshare sync agents' to distribute to all targets") + } + + logSummary.SkillCount = installed + return logSummary, nil +} From 9f77ac0b610a28d8275246da76e1e9874908e981 Mon Sep 17 00:00:00 2001 From: Willie Date: Tue, 7 Apr 2026 19:47:59 +0800 Subject: [PATCH 063/205] feat(install): add TUI multi-select and batch progress for agent install Pure-agent repos now show a checklist TUI for selecting which agents to install (matching the skill install flow). Supports: - Single agent: direct install (no TUI) - Multi-agent: interactive checklist TUI (space/a/enter) - --all/--yes: install all without prompt - -a name1,name2: filter by name without prompt - --dry-run: preview list - Non-TTY: error with hint to use flags - Batch progress: spinner (<=20) or progress bar (>20) - Results display: installed/failed/skipped sections --- cmd/skillshare/install_handlers.go | 218 +++++++++++++++++++++++++---- cmd/skillshare/install_prompt.go | 61 ++++++++ 2 files changed, 250 insertions(+), 29 deletions(-) diff --git a/cmd/skillshare/install_handlers.go b/cmd/skillshare/install_handlers.go index dee0af51..415624b5 100644 --- a/cmd/skillshare/install_handlers.go +++ b/cmd/skillshare/install_handlers.go @@ -1019,55 +1019,215 @@ func truncateDesc(s string, max int) string { return string(runes[:max]) + " ..." } -// handleAgentInstall installs agents from a pure-agent repo (no SKILL.md files found). +// handleAgentInstall installs agents from a pure-agent repo. +// Matches the skill install flow: single→direct, multi→TUI/flags, batch progress. func handleAgentInstall(discovery *install.DiscoveryResult, agentsDir string, opts install.InstallOptions, logSummary installLogSummary) (installLogSummary, error) { agents := discovery.Agents - // Apply --agent name filter if specified - if len(opts.AgentNames) > 0 { - nameSet := make(map[string]bool, len(opts.AgentNames)) - for _, n := range opts.AgentNames { - nameSet[n] = true - } - var filtered []install.AgentInfo - for _, a := range agents { - if nameSet[a.Name] { - filtered = append(filtered, a) - } - } - agents = filtered - if len(agents) == 0 { - ui.Info("No matching agents found") + // Single agent: install directly (matches single-skill pattern) + if len(agents) == 1 && !opts.HasAgentFilter() && !opts.ShouldInstallAll() { + agent := agents[0] + if opts.DryRun { + ui.Info(" %s (%s)", agent.Name, agent.FileName) + ui.Warning("[dry-run] Would install agent: %s", agent.Name) return logSummary, nil } - } - - installed := 0 - for _, agent := range agents { spinner := ui.StartSpinner(fmt.Sprintf("Installing agent %s...", agent.Name)) result, err := install.InstallAgentFromDiscovery(discovery, agent, agentsDir, opts) spinner.Stop() if err != nil { - ui.Warning("Failed to install agent %s: %v", agent.Name, err) - continue + ui.ErrorMsg("Failed to install agent %s: %v", agent.Name, err) + return logSummary, err } if result.Action == "skipped" { ui.StepSkip(agent.Name, strings.Join(result.Warnings, "; ")) - } else if opts.DryRun { - ui.Warning("[dry-run] Would install agent: %s", agent.Name) } else { - ui.StepDone(agent.Name, agentsDir) + ui.SuccessMsg("Installed agent: %s", agent.Name) + logSummary.SkillCount = 1 + logSummary.InstalledSkills = append(logSummary.InstalledSkills, agent.Name) + ui.SectionLabel("Next Steps") + ui.Info("Run 'skillshare sync agents' to distribute to all targets") + } + return logSummary, nil + } + + // Dry-run: show list and return + if opts.DryRun { + selected := agents + if opts.HasAgentFilter() || opts.ShouldInstallAll() { + var err error + selected, err = selectAgents(agents, opts) + if err != nil { + return logSummary, err + } + } + fmt.Println() + for _, a := range selected { + ui.Info(" %s (%s)", a.Name, a.FileName) + } + ui.Warning("[dry-run] Would install %d agent(s)", len(selected)) + return logSummary, nil + } + + // Non-interactive: --all/--yes or -a filter + if opts.HasAgentFilter() || opts.ShouldInstallAll() { + selected, err := selectAgents(agents, opts) + if err != nil { + return logSummary, err + } + fmt.Println() + batchSummary := installSelectedAgents(selected, discovery, agentsDir, opts) + logSummary.InstalledSkills = append(logSummary.InstalledSkills, batchSummary.InstalledSkills...) + logSummary.FailedSkills = append(logSummary.FailedSkills, batchSummary.FailedSkills...) + logSummary.SkillCount = len(logSummary.InstalledSkills) + return logSummary, nil + } + + // Non-TTY fallback + if !ui.IsTTY() { + ui.Info("Found %d agents. Non-interactive mode requires --all, --yes, or -a ", len(agents)) + return logSummary, fmt.Errorf("interactive selection not available in non-TTY mode") + } + + // Interactive TUI selection + fmt.Println() + selected, err := selectAgents(agents, opts) + if err != nil { + return logSummary, err + } + if len(selected) == 0 { + ui.Info("No agents selected") + return logSummary, nil + } + + fmt.Println() + batchSummary := installSelectedAgents(selected, discovery, agentsDir, opts) + logSummary.InstalledSkills = append(logSummary.InstalledSkills, batchSummary.InstalledSkills...) + logSummary.FailedSkills = append(logSummary.FailedSkills, batchSummary.FailedSkills...) + logSummary.SkillCount = len(logSummary.InstalledSkills) + return logSummary, nil +} + +// agentInstallResult tracks the outcome of a single agent install. +type agentInstallResult struct { + agent install.AgentInfo + success bool + skipped bool + message string +} + +// installSelectedAgents installs a batch of agents with progress display. +func installSelectedAgents(selected []install.AgentInfo, discovery *install.DiscoveryResult, agentsDir string, opts install.InstallOptions) installBatchSummary { + results := make([]agentInstallResult, 0, len(selected)) + + var installSpinner *ui.Spinner + var progressBar *ui.ProgressBar + if len(selected) > largeBatchProgressThreshold { + progressBar = ui.StartProgress("Installing agents", len(selected)) + } else { + installSpinner = ui.StartSpinnerWithSteps("Installing...", len(selected)) + } + + for i, agent := range selected { + if installSpinner != nil { + installSpinner.NextStep(fmt.Sprintf("Installing %s...", agent.Name)) + if i == 0 { + installSpinner.Update(fmt.Sprintf("Installing %s...", agent.Name)) + } + } + if progressBar != nil { + progressBar.UpdateTitle(fmt.Sprintf("Installing %s", agent.Name)) + } + + result, err := install.InstallAgentFromDiscovery(discovery, agent, agentsDir, opts) + if err != nil { + results = append(results, agentInstallResult{agent: agent, message: err.Error()}) + } else if result.Action == "skipped" { + results = append(results, agentInstallResult{agent: agent, skipped: true, message: strings.Join(result.Warnings, "; ")}) + } else { + results = append(results, agentInstallResult{agent: agent, success: true}) + } + + if progressBar != nil { + progressBar.Increment() + } + } + + if progressBar != nil { + progressBar.Stop() + } + + displayAgentInstallResults(results, installSpinner) + + summary := installBatchSummary{} + for _, r := range results { + if r.success { + summary.InstalledSkills = append(summary.InstalledSkills, r.agent.Name) + } else if !r.skipped { + summary.FailedSkills = append(summary.FailedSkills, r.agent.Name) + } + } + return summary +} + +// displayAgentInstallResults renders the install outcome for a batch of agents. +func displayAgentInstallResults(results []agentInstallResult, spinner *ui.Spinner) { + var installed, failed, skippedCount int + for _, r := range results { + switch { + case r.success: installed++ + case r.skipped: + skippedCount++ + default: + failed++ + } + } + + summaryMsg := buildInstallSummary(installed, failed, skippedCount) + if spinner != nil { + switch { + case failed > 0 && installed == 0: + spinner.Fail(summaryMsg) + case failed > 0: + spinner.Warn(summaryMsg) + default: + spinner.Success(summaryMsg) + } + } else { + fmt.Println() + if failed > 0 && installed == 0 { + ui.ErrorMsg("%s", summaryMsg) + } else { + ui.SuccessMsg("%s", summaryMsg) } } + if failed > 0 { + ui.SectionLabel("Failed") + for _, r := range results { + if !r.success && !r.skipped { + ui.StepFail(r.agent.Name, r.message) + } + } + } + if skippedCount > 0 { + ui.SectionLabel("Skipped") + for _, r := range results { + if r.skipped { + ui.StepSkip(r.agent.Name, r.message) + } + } + } if installed > 0 { + ui.SectionLabel("Installed") + for _, r := range results { + if r.success { + ui.StepDone(r.agent.Name, "") + } + } fmt.Println() - ui.SuccessMsg("Installed %d agent(s) to %s", installed, agentsDir) ui.SectionLabel("Next Steps") ui.Info("Run 'skillshare sync agents' to distribute to all targets") } - - logSummary.SkillCount = installed - return logSummary, nil } diff --git a/cmd/skillshare/install_prompt.go b/cmd/skillshare/install_prompt.go index 5632d2de..20965286 100644 --- a/cmd/skillshare/install_prompt.go +++ b/cmd/skillshare/install_prompt.go @@ -285,3 +285,64 @@ func printSkillListCompact(skills []install.SkillInfo) { } ui.Info("... and %d more skill(s)", len(skills)-showCount) } + +// selectAgents routes agent selection through filter, all, or interactive TUI. +func selectAgents(agents []install.AgentInfo, opts install.InstallOptions) ([]install.AgentInfo, error) { + switch { + case opts.HasAgentFilter(): + matched, notFound := filterAgentsByName(agents, opts.AgentNames) + if len(notFound) > 0 { + return nil, fmt.Errorf("agents not found: %s", strings.Join(notFound, ", ")) + } + return matched, nil + case opts.ShouldInstallAll(): + return agents, nil + default: + return promptAgentInstallSelection(agents) + } +} + +// filterAgentsByName returns agents matching any of the given names (case-insensitive). +func filterAgentsByName(agents []install.AgentInfo, names []string) (matched []install.AgentInfo, notFound []string) { + nameSet := make(map[string]bool, len(names)) + for _, n := range names { + nameSet[strings.ToLower(n)] = true + } + found := make(map[string]bool) + for _, a := range agents { + if nameSet[strings.ToLower(a.Name)] { + matched = append(matched, a) + found[strings.ToLower(a.Name)] = true + } + } + for _, n := range names { + if !found[strings.ToLower(n)] { + notFound = append(notFound, n) + } + } + return +} + +// promptAgentInstallSelection shows a multi-select TUI for agent installation. +func promptAgentInstallSelection(agents []install.AgentInfo) ([]install.AgentInfo, error) { + items := make([]checklistItemData, len(agents)) + for i, a := range agents { + items[i] = checklistItemData{label: a.Name, desc: a.FileName} + } + indices, err := runChecklistTUI(checklistConfig{ + title: "Select agents to install", + items: items, + itemName: "agent", + }) + if err != nil { + return nil, err + } + if indices == nil { + return nil, nil // cancelled + } + selected := make([]install.AgentInfo, len(indices)) + for i, idx := range indices { + selected[i] = agents[idx] + } + return selected, nil +} From e2a3d7024cad9669ef56aa83389a2abce92bd281 Mon Sep 17 00:00:00 2001 From: Willie Date: Tue, 7 Apr 2026 20:17:46 +0800 Subject: [PATCH 064/205] fix: install agents from mixed repos after skills Mixed repos containing both skills/ and agents/ directories previously only installed skills, silently ignoring agents. Now after every skill install exit point, discovered agents are also installed to the agents source directory. - Add installDiscoveredAgents helper that installs agents post-skills - Update 'Found' message to show agent count in mixed repos - Call helper at all 4 skill install exit points in handleGitInstall - Add integration test for mixed-repo install scenario --- cmd/skillshare/install_handlers.go | 41 ++++++++++++++++++++++++- tests/integration/install_agent_test.go | 41 +++++++++++++++++++++++++ 2 files changed, 81 insertions(+), 1 deletion(-) diff --git a/cmd/skillshare/install_handlers.go b/cmd/skillshare/install_handlers.go index 415624b5..2df82e72 100644 --- a/cmd/skillshare/install_handlers.go +++ b/cmd/skillshare/install_handlers.go @@ -218,6 +218,7 @@ func handleGitInstall(source *install.Source, cfg *config.Config, opts install.I logSummary.InstalledSkills = append(logSummary.InstalledSkills, skill.Name) logSummary.SkillCount = len(logSummary.InstalledSkills) } + installDiscoveredAgents(discovery, cfg, opts) return logSummary, nil } @@ -234,7 +235,11 @@ func handleGitInstall(source *install.Source, cfg *config.Config, opts install.I return handleAgentInstall(discovery, agentsDir, opts, logSummary) } - ui.StepEnd("Found", fmt.Sprintf("%d skill(s)", len(discovery.Skills))) + foundMsg := fmt.Sprintf("%d skill(s)", len(discovery.Skills)) + if len(discovery.Agents) > 0 { + foundMsg += fmt.Sprintf(", %d agent(s)", len(discovery.Agents)) + } + ui.StepEnd("Found", foundMsg) // Apply --exclude early so excluded skills never appear in prompts if len(opts.Exclude) > 0 { @@ -302,6 +307,7 @@ func handleGitInstall(source *install.Source, cfg *config.Config, opts install.I logSummary.InstalledSkills = append(logSummary.InstalledSkills, skill.Name) logSummary.SkillCount = len(logSummary.InstalledSkills) } + installDiscoveredAgents(discovery, cfg, opts) return logSummary, nil } @@ -326,6 +332,7 @@ func handleGitInstall(source *install.Source, cfg *config.Config, opts install.I logSummary.InstalledSkills = append(logSummary.InstalledSkills, batchSummary.InstalledSkills...) logSummary.FailedSkills = append(logSummary.FailedSkills, batchSummary.FailedSkills...) logSummary.SkillCount = len(logSummary.InstalledSkills) + installDiscoveredAgents(discovery, cfg, opts) return logSummary, nil } @@ -361,6 +368,7 @@ func handleGitInstall(source *install.Source, cfg *config.Config, opts install.I logSummary.InstalledSkills = append(logSummary.InstalledSkills, batchSummary.InstalledSkills...) logSummary.FailedSkills = append(logSummary.FailedSkills, batchSummary.FailedSkills...) logSummary.SkillCount = len(logSummary.InstalledSkills) + installDiscoveredAgents(discovery, cfg, opts) return logSummary, nil } @@ -1108,6 +1116,37 @@ func handleAgentInstall(discovery *install.DiscoveryResult, agentsDir string, op return logSummary, nil } +// installDiscoveredAgents installs agents from a mixed repo after skills have been installed. +func installDiscoveredAgents(discovery *install.DiscoveryResult, cfg *config.Config, opts install.InstallOptions) { + if len(discovery.Agents) == 0 { + return + } + if opts.Kind == "skill" { + return + } + + agentsDir := cfg.EffectiveAgentsSource() + fmt.Println() + ui.Header("Installing agents") + + for _, agent := range discovery.Agents { + spinner := ui.StartSpinner(fmt.Sprintf("Installing agent %s...", agent.Name)) + result, err := install.InstallAgentFromDiscovery(discovery, agent, agentsDir, opts) + spinner.Stop() + if err != nil { + ui.ErrorMsg("Failed to install agent %s: %v", agent.Name, err) + continue + } + if result.Action == "skipped" { + ui.StepSkip(agent.Name, strings.Join(result.Warnings, "; ")) + } else if opts.DryRun { + ui.Warning("[dry-run] Would install agent: %s", agent.Name) + } else { + ui.SuccessMsg("Installed agent: %s", agent.Name) + } + } +} + // agentInstallResult tracks the outcome of a single agent install. type agentInstallResult struct { agent install.AgentInfo diff --git a/tests/integration/install_agent_test.go b/tests/integration/install_agent_test.go index fe964b0e..d471f9f8 100644 --- a/tests/integration/install_agent_test.go +++ b/tests/integration/install_agent_test.go @@ -128,3 +128,44 @@ targets: {} result := sb.RunCLI("uninstall", "-g", "agents", "nonexistent") result.AssertOutputNotContains(t, "unknown option") } + +func TestInstall_MixedRepo_InstallsAgentsToAgentsDir(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: {} +`) + + // Create a git repo with both skills and agents + repoDir := filepath.Join(sb.Home, "mixed-repo") + os.MkdirAll(filepath.Join(repoDir, "skills", "my-skill"), 0755) + os.WriteFile(filepath.Join(repoDir, "skills", "my-skill", "SKILL.md"), + []byte("---\nname: my-skill\n---\n# My Skill"), 0644) + os.MkdirAll(filepath.Join(repoDir, "agents"), 0755) + os.WriteFile(filepath.Join(repoDir, "agents", "my-agent.md"), + []byte("# My Agent"), 0644) + initGitRepo(t, repoDir) + + result := sb.RunCLI("install", "file://"+repoDir, "--yes") + result.AssertSuccess(t) + + // Skill should be in skills source + skillPath := filepath.Join(sb.SourcePath, "my-skill") + if !sb.FileExists(filepath.Join(skillPath, "SKILL.md")) { + t.Error("skill should be installed to skills source dir") + } + + // Agent should be in agents source (NOT skills source) + agentsDir := filepath.Join(filepath.Dir(sb.SourcePath), "agents") + agentPath := filepath.Join(agentsDir, "my-agent.md") + if !sb.FileExists(agentPath) { + t.Errorf("agent should be installed to agents dir (%s), not skills dir", agentsDir) + } + + // Agent should NOT be in skills source + wrongPath := filepath.Join(sb.SourcePath, "my-agent.md") + if sb.FileExists(wrongPath) { + t.Error("agent should NOT be in skills source dir") + } +} From 73d4d32dfe28bf1ca4b4bec2117201beb7516bbf Mon Sep 17 00:00:00 2001 From: Willie Date: Tue, 7 Apr 2026 20:22:30 +0800 Subject: [PATCH 065/205] feat(sync): warn when targets are skipped for agents (no agents path) Both syncAgentsGlobal and syncAgentsProject now collect target names where agentPath is empty and print a summary warning after the sync summary line. This makes it visible to users which targets were silently skipped, helping them configure agents paths if desired. --- cmd/skillshare/sync_agents.go | 16 +++++++++ tests/integration/sync_agent_test.go | 50 ++++++++++++++++++++++++++++ 2 files changed, 66 insertions(+) create mode 100644 tests/integration/sync_agent_test.go diff --git a/cmd/skillshare/sync_agents.go b/cmd/skillshare/sync_agents.go index c96c37a6..8194d979 100644 --- a/cmd/skillshare/sync_agents.go +++ b/cmd/skillshare/sync_agents.go @@ -4,6 +4,8 @@ import ( "fmt" "os" "path/filepath" + "sort" + "strings" "time" "skillshare/internal/config" @@ -57,10 +59,12 @@ func syncAgentsGlobal(cfg *config.Config, dryRun, force, jsonOutput bool, start builtinAgents := config.DefaultAgentTargets() var totals agentSyncStats var syncErr error + var skippedTargets []string for name := range cfg.Targets { agentPath := resolveAgentTargetPath(cfg.Targets[name], builtinAgents, name) if agentPath == "" { + skippedTargets = append(skippedTargets, name) continue } @@ -81,6 +85,11 @@ func syncAgentsGlobal(cfg *config.Config, dryRun, force, jsonOutput bool, start ui.Info("Agent sync: %d linked, %d local, %d updated, %d pruned (%s)", totals.linked, totals.skipped, totals.updated, totals.pruned, formatDuration(start)) + if len(skippedTargets) > 0 { + sort.Strings(skippedTargets) + ui.Warning("%d target(s) skipped for agents (no agents path): %s", + len(skippedTargets), strings.Join(skippedTargets, ", ")) + } } return totals, syncErr @@ -135,6 +144,7 @@ func syncAgentsProject(projectRoot string, dryRun, force, jsonOutput bool, start builtinAgents := config.ProjectAgentTargets() var totals agentSyncStats var syncErr error + var skippedTargets []string // Load project config for target list projCfg, loadErr := config.LoadProject(projectRoot) @@ -145,6 +155,7 @@ func syncAgentsProject(projectRoot string, dryRun, force, jsonOutput bool, start for _, entry := range projCfg.Targets { agentPath := resolveProjectAgentTargetPath(entry, builtinAgents, projectRoot) if agentPath == "" { + skippedTargets = append(skippedTargets, entry.Name) continue } @@ -164,6 +175,11 @@ func syncAgentsProject(projectRoot string, dryRun, force, jsonOutput bool, start ui.Info("Project agent sync: %d linked, %d local, %d updated, %d pruned (%s)", totals.linked, totals.skipped, totals.updated, totals.pruned, formatDuration(start)) + if len(skippedTargets) > 0 { + sort.Strings(skippedTargets) + ui.Warning("%d target(s) skipped for agents (no agents path): %s", + len(skippedTargets), strings.Join(skippedTargets, ", ")) + } } return syncErr diff --git a/tests/integration/sync_agent_test.go b/tests/integration/sync_agent_test.go new file mode 100644 index 00000000..72c9b79e --- /dev/null +++ b/tests/integration/sync_agent_test.go @@ -0,0 +1,50 @@ +//go:build !online + +package integration + +import ( + "os" + "path/filepath" + "testing" + + "skillshare/internal/testutil" +) + +func TestSync_Agents_SkipsTargetsWithoutAgentsPath(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + // Create agents source with an agent + agentsDir := filepath.Join(filepath.Dir(sb.SourcePath), "agents") + os.MkdirAll(agentsDir, 0755) + os.WriteFile(filepath.Join(agentsDir, "helper.md"), []byte("# Helper"), 0644) + + // Configure a target WITH agents path and one WITHOUT + claudeSkills := filepath.Join(sb.Home, ".claude", "skills") + claudeAgents := filepath.Join(sb.Home, ".claude", "agents") + windsurf := filepath.Join(sb.Home, ".windsurf", "skills") + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + skills: + path: "` + claudeSkills + `" + agents: + path: "` + claudeAgents + `" + windsurf: + skills: + path: "` + windsurf + `" +`) + + result := sb.RunCLI("sync", "agents") + result.AssertSuccess(t) + + // Agent should be synced to claude + if !sb.FileExists(filepath.Join(claudeAgents, "helper.md")) { + t.Error("agent should be synced to claude agents dir") + } + + // Warning should mention windsurf was skipped + result.AssertAnyOutputContains(t, "skipped") + result.AssertAnyOutputContains(t, "windsurf") +} From de49f31a39b8a2a81976fff774f1c2ec478b1d8e Mon Sep 17 00:00:00 2001 From: Willie Date: Tue, 7 Apr 2026 20:27:31 +0800 Subject: [PATCH 066/205] test: add E2E test for mixed-repo install then sync to correct targets MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Validates the full agent workflow: install mixed repo (skills + agents) then sync all — verifying skills go to skills targets, agents go to agent-capable targets, agents are excluded from targets without agents path, and the skip warning mentions the target name. Uses 'sync all' since plain 'sync' only syncs skills by design. --- tests/integration/install_agent_test.go | 62 +++++++++++++++++++++++++ 1 file changed, 62 insertions(+) diff --git a/tests/integration/install_agent_test.go b/tests/integration/install_agent_test.go index d471f9f8..7811b1ea 100644 --- a/tests/integration/install_agent_test.go +++ b/tests/integration/install_agent_test.go @@ -129,6 +129,68 @@ targets: {} result.AssertOutputNotContains(t, "unknown option") } +func TestInstall_MixedRepo_ThenSync_AgentsGoToCorrectTargets(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + claudeSkills := filepath.Join(sb.Home, ".claude", "skills") + claudeAgents := filepath.Join(sb.Home, ".claude", "agents") + windsurf := filepath.Join(sb.Home, ".windsurf", "skills") + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + skills: + path: "` + claudeSkills + `" + agents: + path: "` + claudeAgents + `" + windsurf: + skills: + path: "` + windsurf + `" +`) + + // Create mixed repo with both skills and agents + repoDir := filepath.Join(sb.Home, "mixed-repo") + os.MkdirAll(filepath.Join(repoDir, "skills", "my-skill"), 0755) + os.WriteFile(filepath.Join(repoDir, "skills", "my-skill", "SKILL.md"), + []byte("---\nname: my-skill\n---\n# My Skill"), 0644) + os.MkdirAll(filepath.Join(repoDir, "agents"), 0755) + os.WriteFile(filepath.Join(repoDir, "agents", "my-agent.md"), + []byte("# My Agent"), 0644) + initGitRepo(t, repoDir) + + // Install + installResult := sb.RunCLI("install", "file://"+repoDir, "--yes") + installResult.AssertSuccess(t) + + // Sync all (skills + agents) + syncResult := sb.RunCLI("sync", "all") + syncResult.AssertSuccess(t) + + // Skill in claude skills target + if !sb.FileExists(filepath.Join(claudeSkills, "my-skill", "SKILL.md")) { + t.Error("skill should be synced to claude skills dir") + } + + // Agent in claude agents target + if !sb.FileExists(filepath.Join(claudeAgents, "my-agent.md")) { + t.Error("agent should be synced to claude agents dir") + } + + // Skill in windsurf (skills support) + if !sb.FileExists(filepath.Join(windsurf, "my-skill", "SKILL.md")) { + t.Error("skill should be synced to windsurf skills dir") + } + + // Agent NOT in windsurf skills (no agents path) + if sb.FileExists(filepath.Join(windsurf, "my-agent.md")) { + t.Error("agent should NOT be in windsurf skills dir") + } + + // Warning about skipped target + syncResult.AssertAnyOutputContains(t, "windsurf") +} + func TestInstall_MixedRepo_InstallsAgentsToAgentsDir(t *testing.T) { sb := testutil.NewSandbox(t) defer sb.Cleanup() From 9d934f87d36cd39017a81a232065958af88a7921 Mon Sep 17 00:00:00 2001 From: Willie Date: Tue, 7 Apr 2026 20:45:40 +0800 Subject: [PATCH 067/205] fix: pass AgentsSource to Config in project mode install Without this, installDiscoveredAgents in project mode falls back to the global agents path (~/.config/skillshare/agents/) instead of the project agents path (.skillshare/agents/). --- cmd/skillshare/install_project.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmd/skillshare/install_project.go b/cmd/skillshare/install_project.go index 8c14f8e9..8660259e 100644 --- a/cmd/skillshare/install_project.go +++ b/cmd/skillshare/install_project.go @@ -56,7 +56,7 @@ func cmdInstallProject(args []string, root string) (installLogSummary, error) { return installFromProjectConfig(runtime, parsed.opts) } - cfg := &config.Config{Source: runtime.sourcePath, GitLabHosts: runtime.config.GitLabHosts} + cfg := &config.Config{Source: runtime.sourcePath, AgentsSource: runtime.agentsSourcePath, GitLabHosts: runtime.config.GitLabHosts} source, resolvedFromMeta, err := resolveInstallSource(parsed.sourceArg, parsed.opts, cfg) if err == nil && parsed.opts.Branch != "" { source.Branch = parsed.opts.Branch From 5afaf57745d87d141aa6f51b518314565cbb1c2e Mon Sep 17 00:00:00 2001 From: Willie Date: Tue, 7 Apr 2026 21:53:44 +0800 Subject: [PATCH 068/205] feat(tui): add tab switching between skills/agents in list TUI - Add Tab/Shift+Tab to cycle All/Skills/Agents tabs with count badges - Tab bar renders at top with bold+underline cyan active, dim inactive - Tab acts as pre-filter composing with text filter via AND logic - Cache tab-filtered items in applyFilter() to avoid per-frame recomputation - Add tabNoun() method to consolidate noun switches across title/filter bar Agent install --into support: - Support --into flag for agent install (CLI, server, project mode) - Support --into=value syntax in addition to --into value - Server handler_skills uses recursive AgentKind.Discover() for nested agents - Fix agent metadata read path: use file basename instead of frontmatter name - Sync --all flag now includes agent sync (not just extras) Agent content viewer: - Agents render full-width without sidebar (single .md file, no tree needed) - Simplified help text and keyboard handling for agent content view Agent uninstall fixes: - Use discovery-based resolution instead of hardcoded name+.md path lookup - Support matching by Name, FlatName, RelPath, or trimmed RelPath - Compressed confirmation display (list format, max 20 shown) UI polish: - Help bar keys highlighted in dim cyan, descriptions in dim gray - Align 'No items.' text with list title via PaddingLeft(2) --- .../install_mixed_repo_agents_runbook.md | 200 ++++++++++++++++++ cmd/skillshare/install.go | 10 + cmd/skillshare/install_handlers.go | 4 +- cmd/skillshare/list.go | 22 +- cmd/skillshare/list_project.go | 17 +- cmd/skillshare/list_tui.go | 188 +++++++++++++--- cmd/skillshare/list_tui_content.go | 79 +++++-- cmd/skillshare/list_tui_test.go | 128 ++++++++++- cmd/skillshare/sync.go | 10 +- cmd/skillshare/tui_colors.go | 10 +- cmd/skillshare/tui_helpers.go | 25 +++ cmd/skillshare/uninstall_agents.go | 81 ++++--- internal/server/handler_install.go | 3 + internal/server/handler_skills.go | 56 ++--- internal/ui/pterm.go | 40 ++++ 15 files changed, 732 insertions(+), 141 deletions(-) create mode 100644 ai_docs/tests/install_mixed_repo_agents_runbook.md diff --git a/ai_docs/tests/install_mixed_repo_agents_runbook.md b/ai_docs/tests/install_mixed_repo_agents_runbook.md new file mode 100644 index 00000000..311e78e0 --- /dev/null +++ b/ai_docs/tests/install_mixed_repo_agents_runbook.md @@ -0,0 +1,200 @@ +# CLI E2E Runbook: Install Mixed Repo (Skills + Agents) + +Validates that `skillshare install` from a repo containing both skills and agents +installs skills to the skills source and agents to the agents source, then sync +distributes agents only to targets with an agents path configured. + +**Origin**: Bug fix — agents from mixed repos were incorrectly ignored or installed to skills dir. + +## Scope + +- Mixed repo install: skills go to `~/.config/skillshare/skills/`, agents go to `~/.config/skillshare/agents/` +- Pure agent repo install: agents go to agents dir +- Sync agents: targets with agents path receive agents, targets without are skipped with warning +- Project mode: agents go to `.skillshare/agents/`, not global agents dir + +## Environment + +Run inside devcontainer via mdproof (no ssenv wrapper needed). +All global commands use `-g` to force global mode. + +## Steps + +### 1. Create mixed git repo with skills and agents + +```bash +rm -rf /tmp/mixed-repo +mkdir -p /tmp/mixed-repo/skills/demo-skill /tmp/mixed-repo/agents +cat > /tmp/mixed-repo/skills/demo-skill/SKILL.md <<'EOF' +--- +name: demo-skill +--- +# Demo Skill +A demo skill for testing. +EOF +cat > /tmp/mixed-repo/agents/demo-agent.md <<'EOF' +--- +name: demo-agent +description: A demo agent +--- +# Demo Agent +A demo agent for testing. +EOF +cd /tmp/mixed-repo && git init && git config user.email "test@test.com" && git config user.name "test" && git add -A && git commit -m "init" 2>&1 +ls skills/demo-skill/SKILL.md agents/demo-agent.md +``` + +Expected: +- exit_code: 0 +- SKILL.md +- demo-agent.md + +### 2. Install mixed repo — both skills and agents found + +```bash +ss install -g file:///tmp/mixed-repo --yes --force +``` + +Expected: +- exit_code: 0 +- regex: 1 skill\(s\), 1 agent\(s\) +- Installed: demo-skill +- Installed agent: demo-agent + +### 3. Verify skill in skills source, agent in agents source + +```bash +SKILLS_DIR=~/.config/skillshare/skills +AGENTS_DIR=~/.config/skillshare/agents +test -f "$SKILLS_DIR/demo-skill/SKILL.md" && echo "skill: in skills dir" || echo "skill: MISSING" +test -f "$AGENTS_DIR/demo-agent.md" && echo "agent: in agents dir" || echo "agent: MISSING" +test -f "$SKILLS_DIR/demo-agent.md" && echo "agent: WRONG in skills dir" || echo "agent: not in skills dir (correct)" +``` + +Expected: +- exit_code: 0 +- skill: in skills dir +- agent: in agents dir +- agent: not in skills dir (correct) +- Not MISSING +- Not WRONG + +### 4. Sync all — agents go to targets with agents path + +```bash +ss sync all -g +``` + +Expected: +- exit_code: 0 + +### 5. Verify agents synced to claude (has agents path) but not to targets without + +```bash +CLAUDE_AGENTS=~/.claude/agents +test -L "$CLAUDE_AGENTS/demo-agent.md" && echo "claude: agent synced" || echo "claude: agent MISSING" +``` + +Expected: +- exit_code: 0 +- claude: agent synced +- Not MISSING + +### 6. Sync agents — warning lists targets without agents path + +```bash +ss sync agents -g 2>&1 +``` + +Expected: +- exit_code: 0 +- regex: skipped for agents + +### 7. Create pure agent repo and install + +```bash +rm -rf /tmp/agent-only-repo +mkdir -p /tmp/agent-only-repo/agents +cat > /tmp/agent-only-repo/agents/helper.md <<'EOF' +--- +name: helper +description: A helper agent +--- +# Helper Agent +EOF +cd /tmp/agent-only-repo && git init && git config user.email "test@test.com" && git config user.name "test" && git add -A && git commit -m "init" 2>&1 +ss install -g file:///tmp/agent-only-repo --yes --force +``` + +Expected: +- exit_code: 0 +- regex: 1 agent\(s\) +- helper + +### 8. Verify pure agent repo installed to agents dir + +```bash +AGENTS_DIR=~/.config/skillshare/agents +test -f "$AGENTS_DIR/helper.md" && echo "helper: in agents dir" || echo "helper: MISSING" +SKILLS_DIR=~/.config/skillshare/skills +test -d "$SKILLS_DIR/helper" && echo "helper: WRONG in skills dir" || echo "helper: not in skills dir (correct)" +``` + +Expected: +- exit_code: 0 +- helper: in agents dir +- helper: not in skills dir (correct) +- Not MISSING +- Not WRONG + +### 9. Project mode — install mixed repo to project agents dir + +```bash +rm -rf /tmp/test-project +mkdir -p /tmp/test-project +cd /tmp/test-project +ss init -p --targets claude 2>&1 +ss install -p file:///tmp/mixed-repo --yes --force 2>&1 +``` + +Expected: +- exit_code: 0 +- Installed: demo-skill +- Installed agent: demo-agent + +### 10. Verify project mode paths + +```bash +cd /tmp/test-project +test -f .skillshare/skills/demo-skill/SKILL.md && echo "project skill: correct" || echo "project skill: MISSING" +test -f .skillshare/agents/demo-agent.md && echo "project agent: correct" || echo "project agent: MISSING" +GLOBAL_AGENTS=~/.config/skillshare/agents +test -f "$GLOBAL_AGENTS/demo-agent.md" && echo "global agent: EXISTS (wrong for project install)" || echo "global agent: not there (correct)" +``` + +Expected: +- exit_code: 0 +- project skill: correct +- project agent: correct +- Not MISSING + +### 11. Cleanup + +```bash +rm -rf /tmp/mixed-repo /tmp/agent-only-repo /tmp/test-project +ss uninstall demo-skill --force -g 2>/dev/null || true +ss uninstall agents --all --force -g 2>/dev/null || true +``` + +Expected: +- exit_code: 0 + +## Pass Criteria + +- [ ] Mixed repo install shows "N skill(s), N agent(s)" in Found message +- [ ] Skills installed to skills source dir +- [ ] Agents installed to agents source dir (not skills dir) +- [ ] Pure agent repo installs agents correctly +- [ ] Sync distributes agents to targets with agents path +- [ ] Sync shows warning listing targets without agents path +- [ ] Project mode install puts agents in `.skillshare/agents/`, not global diff --git a/cmd/skillshare/install.go b/cmd/skillshare/install.go index cf304bb5..8a3ed705 100644 --- a/cmd/skillshare/install.go +++ b/cmd/skillshare/install.go @@ -132,6 +132,8 @@ func parseInstallArgs(args []string) (*installArgs, bool, error) { } i++ result.opts.Into = args[i] + case strings.HasPrefix(arg, "--into="): + result.opts.Into = strings.TrimPrefix(arg, "--into=") case arg == "--all": result.opts.All = true case arg == "--yes" || arg == "-y": @@ -216,6 +218,14 @@ func destWithInto(sourceDir string, opts install.InstallOptions, skillName strin return filepath.Join(sourceDir, skillName) } +// agentsDirWithInto returns agentsDir joined with opts.Into (if set). +func agentsDirWithInto(agentsDir string, opts install.InstallOptions) string { + if opts.Into != "" { + return filepath.Join(agentsDir, opts.Into) + } + return agentsDir +} + // ensureIntoDirExists creates the Into subdirectory if opts.Into is set. func ensureIntoDirExists(sourceDir string, opts install.InstallOptions) error { if opts.Into == "" { diff --git a/cmd/skillshare/install_handlers.go b/cmd/skillshare/install_handlers.go index 2df82e72..7ca7751b 100644 --- a/cmd/skillshare/install_handlers.go +++ b/cmd/skillshare/install_handlers.go @@ -231,7 +231,7 @@ func handleGitInstall(source *install.Source, cfg *config.Config, opts install.I // Pure agent repo — no skills, only agents if len(discovery.Skills) == 0 && len(discovery.Agents) > 0 { ui.StepEnd("Found", fmt.Sprintf("%d agent(s)", len(discovery.Agents))) - agentsDir := cfg.EffectiveAgentsSource() + agentsDir := agentsDirWithInto(cfg.EffectiveAgentsSource(), opts) return handleAgentInstall(discovery, agentsDir, opts, logSummary) } @@ -1125,7 +1125,7 @@ func installDiscoveredAgents(discovery *install.DiscoveryResult, cfg *config.Con return } - agentsDir := cfg.EffectiveAgentsSource() + agentsDir := agentsDirWithInto(cfg.EffectiveAgentsSource(), opts) fmt.Println() ui.Header("Installing agents") diff --git a/cmd/skillshare/list.go b/cmd/skillshare/list.go index 665a4f8f..179f74d7 100644 --- a/cmd/skillshare/list.go +++ b/cmd/skillshare/list.go @@ -284,8 +284,9 @@ func discoverAndBuildAgentEntries(agentsSource string) []skillEntry { IsNested: d.IsNested, Disabled: d.Disabled, } - // Read sidecar metadata: .skillshare-meta.json - metaPath := filepath.Join(agentsSource, d.Name+".skillshare-meta.json") + // Read sidecar metadata: .skillshare-meta.json (alongside the .md file) + baseName := strings.TrimSuffix(filepath.Base(d.RelPath), ".md") + metaPath := filepath.Join(filepath.Dir(d.SourcePath), baseName+".skillshare-meta.json") if data, readErr := os.ReadFile(metaPath); readErr == nil { var meta install.SkillMeta if jsonErr := json.Unmarshal(data, &meta); jsonErr == nil { @@ -575,17 +576,14 @@ func cmdList(args []string) error { // TTY + not JSON + TUI enabled → launch TUI with async loading (no blank screen) if !opts.JSON && shouldLaunchTUI(opts.NoTUI, cfg) { loadFn := func() listLoadResult { + // Always load both skills and agents — tab UI filters the view. var allEntries []skillEntry - if kind.IncludesSkills() { - discovered, discErr := sync.DiscoverSourceSkillsAll(cfg.Source) - if discErr != nil { - return listLoadResult{err: fmt.Errorf("cannot discover skills: %w", discErr)} - } - allEntries = append(allEntries, buildSkillEntries(discovered)...) - } - if kind.IncludesAgents() { - allEntries = append(allEntries, discoverAndBuildAgentEntries(cfg.EffectiveAgentsSource())...) + discovered, discErr := sync.DiscoverSourceSkillsAll(cfg.Source) + if discErr != nil { + return listLoadResult{err: fmt.Errorf("cannot discover skills: %w", discErr)} } + allEntries = append(allEntries, buildSkillEntries(discovered)...) + allEntries = append(allEntries, discoverAndBuildAgentEntries(cfg.EffectiveAgentsSource())...) total := len(allEntries) allEntries = filterSkillEntries(allEntries, opts.Pattern, opts.TypeFilter) if opts.SortBy != "" { @@ -593,7 +591,7 @@ func cmdList(args []string) error { } return listLoadResult{skills: toSkillItems(allEntries), totalCount: total} } - action, skillName, skillKind, err := runListTUI(loadFn, "global", cfg.Source, cfg.EffectiveAgentsSource(), cfg.Targets) + action, skillName, skillKind, err := runListTUI(loadFn, "global", cfg.Source, cfg.EffectiveAgentsSource(), cfg.Targets, kind) if err != nil { return err } diff --git a/cmd/skillshare/list_project.go b/cmd/skillshare/list_project.go index 87d555cc..a8b9308b 100644 --- a/cmd/skillshare/list_project.go +++ b/cmd/skillshare/list_project.go @@ -38,23 +38,20 @@ func cmdListProject(root string, opts listOptions, kind resourceKindFilter) erro sortBy = "name" } loadFn := func() listLoadResult { + // Always load both skills and agents — tab UI filters the view. var allEntries []skillEntry - if kind.IncludesSkills() { - discovered, err := sync.DiscoverSourceSkillsAll(skillsSource) - if err != nil { - return listLoadResult{err: fmt.Errorf("cannot discover project skills: %w", err)} - } - allEntries = append(allEntries, buildSkillEntries(discovered)...) - } - if kind.IncludesAgents() { - allEntries = append(allEntries, discoverAndBuildAgentEntries(agentsSource)...) + discovered, err := sync.DiscoverSourceSkillsAll(skillsSource) + if err != nil { + return listLoadResult{err: fmt.Errorf("cannot discover project skills: %w", err)} } + allEntries = append(allEntries, buildSkillEntries(discovered)...) + allEntries = append(allEntries, discoverAndBuildAgentEntries(agentsSource)...) total := len(allEntries) allEntries = filterSkillEntries(allEntries, opts.Pattern, opts.TypeFilter) sortSkillEntries(allEntries, sortBy) return listLoadResult{skills: toSkillItems(allEntries), totalCount: total} } - action, skillName, skillKind, err := runListTUI(loadFn, "project", skillsSource, agentsSource, targets) + action, skillName, skillKind, err := runListTUI(loadFn, "project", skillsSource, agentsSource, targets, kind) if err != nil { return err } diff --git a/cmd/skillshare/list_tui.go b/cmd/skillshare/list_tui.go index b677f816..499c6039 100644 --- a/cmd/skillshare/list_tui.go +++ b/cmd/skillshare/list_tui.go @@ -23,6 +23,15 @@ import ( // Keeps the widget fast — pagination + filter operate on at most this many items. const maxListItems = 1000 +// listTab represents the active tab filter in the list TUI. +type listTab int + +const ( + listTabAll listTab = iota // show all items (skills + agents) + listTabSkills // show skills only + listTabAgents // show agents only +) + // applyTUIFilterStyle sets filter prompt, cursor, and input cursor to the shared style. func applyTUIFilterStyle(l *list.Model) { l.Styles.FilterPrompt = tc.Filter @@ -78,6 +87,11 @@ type listTUIModel struct { loadErr error // non-nil if loading failed emptyResult bool // true when async load returned zero skills + // Tab filter — pre-filters allItems by kind (All / Skills / Agents) + activeTab listTab // currently selected tab + tabCounts [3]int // cached counts: [all, skills, agents] + tabFiltered []skillItem // cached result of tabFilteredItems(); set by applyFilter() + // Application-level filter — replaces bubbles/list built-in fuzzy filter // to avoid O(N*M) fuzzy scan on 100k+ items every keystroke. allItems []skillItem // full item set (kept in memory, never passed to list) @@ -108,7 +122,7 @@ type listTUIModel struct { // newListTUIModel creates a new TUI model. // When loadFn is non-nil, skills are loaded asynchronously inside the TUI (spinner shown). // When loadFn is nil, skills/totalCount are used directly (pre-loaded). -func newListTUIModel(loadFn listLoadFn, skills []skillItem, totalCount int, modeLabel, sourcePath, agentsSourcePath string, targets map[string]config.TargetConfig) listTUIModel { +func newListTUIModel(loadFn listLoadFn, skills []skillItem, totalCount int, modeLabel, sourcePath, agentsSourcePath string, targets map[string]config.TargetConfig, initialKind resourceKindFilter) listTUIModel { delegate := listSkillDelegate{} // Build initial item set (empty if async loading) @@ -123,7 +137,8 @@ func newListTUIModel(loadFn listLoadFn, skills []skillItem, totalCount int, mode l := list.New(items, delegate, 0, 0) l.Title = fmt.Sprintf("Installed skills (%s)", modeLabel) l.Styles.Title = tc.ListTitle - l.SetShowStatusBar(false) // we render our own status with real total count + l.Styles.NoItems = l.Styles.NoItems.PaddingLeft(2) // align with title + l.SetShowStatusBar(false) // we render our own status with real total count l.SetFilteringEnabled(false) // application-level filter replaces built-in l.SetShowHelp(false) // we render our own help l.SetShowPagination(false) // we render page info in our status line @@ -140,6 +155,17 @@ func newListTUIModel(loadFn listLoadFn, skills []skillItem, totalCount int, mode fi.Cursor.Style = tc.Filter fi.Placeholder = "filter or t:tracked g:group r:repo k:kind" + // Map CLI kind filter to initial tab + var initTab listTab + switch initialKind { + case kindAgents: + initTab = listTabAgents + case kindSkills: + initTab = listTabSkills + default: + initTab = listTabAll + } + m := listTUIModel{ list: l, totalCount: totalCount, @@ -147,6 +173,7 @@ func newListTUIModel(loadFn listLoadFn, skills []skillItem, totalCount int, mode sourcePath: sourcePath, agentsSourcePath: agentsSourcePath, targets: targets, + activeTab: initTab, detailCache: make(map[string]*detailData), loading: loadFn != nil, loadSpinner: sp, @@ -155,13 +182,64 @@ func newListTUIModel(loadFn listLoadFn, skills []skillItem, totalCount int, mode matchCount: len(allItems), filterInput: fi, } - // Skip initial group header (index 0) if loadFn == nil { + m.recomputeTabCounts() + m.applyFilter() // applies tab + text filter skipGroupItem(&m.list, 1) } + m.updateTitle() return m } +// recomputeTabCounts updates the cached per-tab counts from allItems. +func (m *listTUIModel) recomputeTabCounts() { + var skills, agents int + for _, item := range m.allItems { + if item.entry.Kind == "agent" { + agents++ + } else { + skills++ + } + } + m.tabCounts = [3]int{len(m.allItems), skills, agents} +} + +// tabFilteredItems returns the subset of allItems matching the active tab. +func (m *listTUIModel) tabFilteredItems() []skillItem { + if m.activeTab == listTabAll { + return m.allItems + } + wantAgent := m.activeTab == listTabAgents + cap := m.tabCounts[1] + if wantAgent { + cap = m.tabCounts[2] + } + filtered := make([]skillItem, 0, cap) + for _, item := range m.allItems { + if (item.entry.Kind == "agent") == wantAgent { + filtered = append(filtered, item) + } + } + return filtered +} + +// tabNoun returns the display noun for the active tab. +func (t listTab) noun() string { + switch t { + case listTabSkills: + return "skills" + case listTabAgents: + return "agents" + default: + return "resources" + } +} + +// updateTitle sets the list title based on the active tab and mode. +func (m *listTUIModel) updateTitle() { + m.list.Title = fmt.Sprintf("Installed %s (%s)", m.activeTab.noun(), m.modeLabel) +} + func (m listTUIModel) Init() tea.Cmd { if m.loading && m.loadFn != nil { return tea.Batch(m.loadSpinner.Tick, doLoadCmd(m.loadFn)) @@ -175,11 +253,12 @@ func (m listTUIModel) Init() tea.Cmd { // When filter is empty, all items are restored (full pagination). func (m *listTUIModel) applyFilter() { m.detailScroll = 0 + m.tabFiltered = m.tabFilteredItems() - // No filter — restore full item set with group separators + // No filter — restore tab-filtered item set with group separators if m.filterText == "" { - m.matchCount = len(m.allItems) - m.list.SetItems(buildGroupedItems(m.allItems)) + m.matchCount = len(m.tabFiltered) + m.list.SetItems(buildGroupedItems(m.tabFiltered)) m.list.ResetSelected() return } @@ -189,7 +268,7 @@ func (m *listTUIModel) applyFilter() { // Structured match, capped at maxListItems var matched []list.Item count := 0 - for _, item := range m.allItems { + for _, item := range m.tabFiltered { if matchSkillItem(item, q) { count++ if len(matched) < maxListItems { @@ -232,10 +311,10 @@ func (m listTUIModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) { } m.allItems = msg.result.skills m.totalCount = msg.result.totalCount - m.matchCount = len(msg.result.skills) - // Populate list with group separators - m.list.SetItems(buildGroupedItems(msg.result.skills)) + m.recomputeTabCounts() + m.applyFilter() // applies tab + text filter, sets matchCount skipGroupItem(&m.list, 1) + m.updateTitle() return m, nil case tea.MouseMsg: @@ -316,6 +395,18 @@ func (m listTUIModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) { case "q", "ctrl+c": m.quitting = true return m, tea.Quit + case "tab": + m.activeTab = (m.activeTab + 1) % 3 + m.applyFilter() + m.updateTitle() + skipGroupItem(&m.list, 1) + return m, nil + case "shift+tab": + m.activeTab = (m.activeTab - 1 + 3) % 3 + m.applyFilter() + m.updateTitle() + skipGroupItem(&m.list, 1) + return m, nil case "ctrl+d": m.detailScroll += 8 return m, nil @@ -475,18 +566,46 @@ func (m listTUIModel) View() string { return m.viewVertical() } +// renderTabBar renders the kind tab bar (All / Skills / Agents). +func (m listTUIModel) renderTabBar() string { + type tab struct { + label string + tab listTab + count int + } + tabs := []tab{ + {"All", listTabAll, m.tabCounts[0]}, + {"Skills", listTabSkills, m.tabCounts[1]}, + {"Agents", listTabAgents, m.tabCounts[2]}, + } + + activeStyle := lipgloss.NewStyle().Bold(true).Underline(true) + inactiveStyle := tc.Dim + + var parts []string + for _, t := range tabs { + label := fmt.Sprintf("%s(%d)", t.label, t.count) + if t.tab == m.activeTab { + parts = append(parts, activeStyle.Inherit(tc.Cyan).Render(label)) + } else { + parts = append(parts, inactiveStyle.Render(label)) + } + } + return " " + strings.Join(parts, " ") +} + // renderFilterBar renders the status line for the list TUI. func (m listTUIModel) renderFilterBar() string { return renderTUIFilterBar( m.filterInput.View(), m.filtering, m.filterText, - m.matchCount, len(m.allItems), maxListItems, - "skills", m.renderPageInfo(), + m.matchCount, len(m.tabFiltered), maxListItems, + m.activeTab.noun(), m.renderPageInfo(), ) } func (m *listTUIModel) syncListSize() { if listSplitActive(m.termWidth) { - panelHeight := m.termHeight - 5 + panelHeight := m.termHeight - 7 // -2 for tab bar if panelHeight < 6 { panelHeight = 6 } @@ -494,7 +613,7 @@ func (m *listTUIModel) syncListSize() { return } - listHeight := m.termHeight - 20 + listHeight := m.termHeight - 22 // -2 for tab bar if listHeight < 6 { listHeight = 6 } @@ -538,7 +657,10 @@ func selectedSkillKey(item list.Item) string { func (m listTUIModel) viewSplit() string { var b strings.Builder - panelHeight := m.termHeight - 5 + b.WriteString(m.renderTabBar()) + b.WriteString("\n\n") + + panelHeight := m.termHeight - 7 // -2 for tab bar if panelHeight < 6 { panelHeight = 6 } @@ -567,12 +689,12 @@ func (m listTUIModel) viewSplit() string { b.WriteString(m.renderFilterBar()) b.WriteString(m.renderSummaryFooter()) b.WriteString("\n") - helpText := "↑↓ navigate ←→ page / filter Ctrl+d/u detail Enter view A audit U update E enable/disable X uninstall q quit" + helpText := "Tab skills/agents ↑↓ navigate ←→ page / filter Ctrl+d/u detail Enter view A audit U update E enable/disable X uninstall q quit" if m.filtering { helpText = "t:type g:group r:repo k:kind Enter lock Esc clear q quit" } help := appendScrollInfo(helpText, scrollInfo) - b.WriteString(tc.Help.Render(help)) + b.WriteString(formatHelpBar(help)) b.WriteString("\n") return b.String() @@ -581,13 +703,15 @@ func (m listTUIModel) viewSplit() string { func (m listTUIModel) viewVertical() string { var b strings.Builder + b.WriteString(m.renderTabBar()) + b.WriteString("\n\n") b.WriteString(m.list.View()) b.WriteString("\n\n") b.WriteString(m.renderFilterBar()) var scrollInfo string if item, ok := m.list.SelectedItem().(skillItem); ok { - detailHeight := m.termHeight - m.termHeight*2/5 - 8 + detailHeight := m.termHeight - m.termHeight*2/5 - 10 // -2 for tab bar if detailHeight < 6 { detailHeight = 6 } @@ -606,12 +730,12 @@ func (m listTUIModel) viewVertical() string { b.WriteString(m.renderSummaryFooter()) b.WriteString("\n") - helpText := "↑↓ navigate ←→ page / filter Ctrl+d/u detail Enter view A audit U update E enable/disable X uninstall q quit" + helpText := "Tab skills/agents ↑↓ navigate ←→ page / filter Ctrl+d/u detail Enter view A audit U update E enable/disable X uninstall q quit" if m.filtering { helpText = "t:type g:group r:repo k:kind Enter lock Esc clear q quit" } help := appendScrollInfo(helpText, scrollInfo) - b.WriteString(tc.Help.Render(help)) + b.WriteString(formatHelpBar(help)) b.WriteString("\n") return b.String() @@ -621,7 +745,7 @@ func (m listTUIModel) renderSummaryFooter() string { localCount := 0 trackedCount := 0 remoteCount := 0 - for _, item := range m.allItems { + for _, item := range m.tabFiltered { switch { case item.entry.RepoName != "": trackedCount++ @@ -633,7 +757,7 @@ func (m listTUIModel) renderSummaryFooter() string { } parts := []string{ - tc.Emphasis.Render(formatNumber(m.matchCount)) + tc.Dim.Render("/") + tc.Dim.Render(formatNumber(len(m.allItems))) + tc.Dim.Render(" visible"), + tc.Emphasis.Render(formatNumber(m.matchCount)) + tc.Dim.Render("/") + tc.Dim.Render(formatNumber(len(m.tabFiltered))) + tc.Dim.Render(" visible"), tc.Cyan.Render(formatNumber(localCount)) + tc.Dim.Render(" local"), tc.Green.Render(formatNumber(trackedCount)) + tc.Dim.Render(" tracked"), tc.Yellow.Render(formatNumber(remoteCount)) + tc.Dim.Render(" remote"), @@ -704,13 +828,27 @@ func formatNumber(n int) string { return b.String() } -// getDetailData returns cached detail data for a skill, populating the cache on first access. +// getDetailData returns cached detail data for a skill or agent, populating the cache on first access. func (m listTUIModel) getDetailData(e skillEntry) *detailData { key := e.RelPath if d, ok := m.detailCache[key]; ok { return d } + if e.Kind == "agent" { + // Agents are single .md files — read frontmatter from the file directly + agentFile := filepath.Join(m.agentsSourcePath, e.RelPath) + fm := utils.ParseFrontmatterFields(agentFile, []string{"description", "license"}) + d := &detailData{ + Description: fm["description"], + License: fm["license"], + Files: []string{filepath.Base(e.RelPath)}, + SyncedTargets: m.findSyncedTargets(e), + } + m.detailCache[key] = d + return d + } + skillDir := filepath.Join(m.sourcePath, e.RelPath) skillMD := filepath.Join(skillDir, "SKILL.md") @@ -917,8 +1055,8 @@ func (m listTUIModel) findSyncedTargets(e skillEntry) []string { // runListTUI starts the bubbletea TUI for the skill list. // When loadFn is non-nil, data is loaded asynchronously inside the TUI (no blank screen). // Returns (action, skillName, skillKind, error). action is "" on normal quit (q/ctrl+c). -func runListTUI(loadFn listLoadFn, modeLabel, sourcePath, agentsSourcePath string, targets map[string]config.TargetConfig) (string, string, string, error) { - model := newListTUIModel(loadFn, nil, 0, modeLabel, sourcePath, agentsSourcePath, targets) +func runListTUI(loadFn listLoadFn, modeLabel, sourcePath, agentsSourcePath string, targets map[string]config.TargetConfig, initialKind resourceKindFilter) (string, string, string, error) { + model := newListTUIModel(loadFn, nil, 0, modeLabel, sourcePath, agentsSourcePath, targets, initialKind) p := tea.NewProgram(model, tea.WithAltScreen(), tea.WithMouseCellMotion()) finalModel, err := p.Run() if err != nil { diff --git a/cmd/skillshare/list_tui_content.go b/cmd/skillshare/list_tui_content.go index 0f1ef440..18682a97 100644 --- a/cmd/skillshare/list_tui_content.go +++ b/cmd/skillshare/list_tui_content.go @@ -242,13 +242,17 @@ func autoPreviewFile(m *listTUIModel) { } } -// contentPanelWidth returns the available text width for the right content panel. -// This accounts for PaddingLeft(1) used in rendering, so content renders at -// the exact width the panel can display without line-wrapping. +// contentPanelWidth returns the available text width for the content panel. +// Agents use full-width (no sidebar); skills use dual-pane layout. func (m *listTUIModel) contentPanelWidth() int { + if m.contentKind == "agent" { + w := m.termWidth - 4 + if w < 40 { + w = 40 + } + return w + } sw := sidebarWidth(m.termWidth) - // lipgloss Width includes padding, so subtract 1 for PaddingLeft(1) - // Layout: leftMargin(1) + sidebar(sw) + PaddingLeft(1) + border(1) + PaddingLeft(1) + content + rightMargin(1) w := m.termWidth - sw - 5 - 1 if w < 40 { w = 40 @@ -312,8 +316,8 @@ func renderMarkdown(text string, width int) string { // ─── Keyboard Handling ─────────────────────────────────────────────── -// handleContentKey handles keyboard input in the dual-pane content viewer. -// Keyboard always controls the left tree; Ctrl+d/u/g/G scroll the right panel. +// handleContentKey handles keyboard input in the content viewer. +// Agents (single file) only support scroll; skills support tree navigation + scroll. func (m listTUIModel) handleContentKey(msg tea.KeyMsg) (tea.Model, tea.Cmd) { switch msg.String() { case "q", "ctrl+c": @@ -323,35 +327,36 @@ func (m listTUIModel) handleContentKey(msg tea.KeyMsg) (tea.Model, tea.Cmd) { m.showContent = false return m, nil - // Left tree: navigate + // Left tree: navigate (skills only — agents have no sidebar) case "j", "down": - if m.treeCursor < len(m.treeNodes)-1 { + if m.contentKind != "agent" && m.treeCursor < len(m.treeNodes)-1 { m.treeCursor++ m.ensureTreeCursorVisible() autoPreviewFile(&m) } return m, nil case "k", "up": - if m.treeCursor > 0 { + if m.contentKind != "agent" && m.treeCursor > 0 { m.treeCursor-- m.ensureTreeCursorVisible() autoPreviewFile(&m) } return m, nil case "l", "right", "enter": - if len(m.treeNodes) > 0 && m.treeCursor < len(m.treeNodes) { + if m.contentKind != "agent" && len(m.treeNodes) > 0 && m.treeCursor < len(m.treeNodes) { node := m.treeNodes[m.treeCursor] if node.isDir { toggleTreeDir(&m) } - // Files are already auto-previewed by j/k } return m, nil case "h", "left": - collapseOrParent(&m) + if m.contentKind != "agent" { + collapseOrParent(&m) + } return m, nil - // Right content: scroll + // Content scroll case "ctrl+d": half := m.contentViewHeight() / 2 max := m.contentMaxScroll() @@ -392,8 +397,50 @@ func sidebarWidth(termWidth int) int { return w } -// renderContentOverlay renders the full-screen dual-pane content viewer. +// renderContentOverlay renders the full-screen content viewer. +// Agents (single .md file) use a full-width layout without sidebar. +// Skills (directory with multiple files) use a dual-pane layout with file tree. func renderContentOverlay(m listTUIModel) string { + if m.contentKind == "agent" { + return renderContentFullWidth(m) + } + return renderContentDualPane(m) +} + +// renderContentFullWidth renders the content viewer without sidebar (for agents). +func renderContentFullWidth(m listTUIModel) string { + var b strings.Builder + + skillName := filepath.Base(m.contentSkillKey) + b.WriteString("\n") + b.WriteString(tc.Title.Render(fmt.Sprintf(" %s", skillName))) + b.WriteString("\n\n") + + textW := m.contentPanelWidth() + contentHeight := m.contentViewHeight() + contentStr, scrollInfo := renderContentStr(m, textW, contentHeight) + + panelW := textW + 2 // +2 for PaddingLeft(2) + panel := lipgloss.NewStyle(). + Width(panelW).MaxWidth(panelW). + Height(contentHeight).MaxHeight(contentHeight). + PaddingLeft(2). + Render(contentStr) + b.WriteString(panel) + b.WriteString("\n\n") + + help := "Ctrl+d/u scroll g/G top/bottom Esc back q quit" + if scrollInfo != "" { + help += " " + scrollInfo + } + b.WriteString(formatHelpBar(help)) + b.WriteString("\n") + + return b.String() +} + +// renderContentDualPane renders the dual-pane content viewer with file tree sidebar. +func renderContentDualPane(m listTUIModel) string { var b strings.Builder titleStyle := tc.Title @@ -452,7 +499,7 @@ func renderContentOverlay(m listTUIModel) string { if scrollInfo != "" { help += " " + scrollInfo } - b.WriteString(tc.Help.Render(help)) + b.WriteString(formatHelpBar(help)) b.WriteString("\n") return b.String() diff --git a/cmd/skillshare/list_tui_test.go b/cmd/skillshare/list_tui_test.go index a39df093..54bba33f 100644 --- a/cmd/skillshare/list_tui_test.go +++ b/cmd/skillshare/list_tui_test.go @@ -41,13 +41,15 @@ func TestListDetailStatusBits(t *testing.T) { } func TestListSummaryFooterCounts(t *testing.T) { + items := []skillItem{ + {entry: skillEntry{Name: "local", RelPath: "local"}}, + {entry: skillEntry{Name: "tracked", RelPath: "tracked", RepoName: "team/repo"}}, + {entry: skillEntry{Name: "remote", RelPath: "remote", Source: "github.com/example/repo"}}, + } m := listTUIModel{ - allItems: []skillItem{ - {entry: skillEntry{Name: "local", RelPath: "local"}}, - {entry: skillEntry{Name: "tracked", RelPath: "tracked", RepoName: "team/repo"}}, - {entry: skillEntry{Name: "remote", RelPath: "remote", Source: "github.com/example/repo"}}, - }, - matchCount: 2, + allItems: items, + tabFiltered: items, // All tab — same as allItems + matchCount: 2, } got := m.renderSummaryFooter() @@ -98,7 +100,7 @@ func TestListViewSplit_HeaderKeepsSkillNameWhenDetailScrolled(t *testing.T) { }, } - m := newListTUIModel(nil, items, len(items), "global", t.TempDir(), "", nil) + m := newListTUIModel(nil, items, len(items), "global", t.TempDir(), "", nil, kindAll) m.termWidth = 120 m.termHeight = 30 m.detailScroll = 999 @@ -133,7 +135,7 @@ func TestApplyFilter_WithTags(t *testing.T) { {entry: skillEntry{Name: "remote-a", RelPath: "remote-a", Source: "github.com/foo/bar"}}, } - m := newListTUIModel(nil, items, len(items), "global", t.TempDir(), "", nil) + m := newListTUIModel(nil, items, len(items), "global", t.TempDir(), "", nil, kindAll) // Filter by type:tracked — should match 2 items m.filterText = "t:tracked" @@ -184,3 +186,113 @@ func TestApplyFilter_WithTags(t *testing.T) { t.Fatalf("cleared matchCount = %d, want %d", m.matchCount, len(items)) } } + +func TestTabCounts(t *testing.T) { + items := []skillItem{ + {entry: skillEntry{Name: "s1", RelPath: "s1", Kind: "skill"}}, + {entry: skillEntry{Name: "s2", RelPath: "s2", Kind: "skill"}}, + {entry: skillEntry{Name: "a1", RelPath: "a1.md", Kind: "agent"}}, + } + m := newListTUIModel(nil, items, len(items), "global", t.TempDir(), "", nil, kindAll) + want := [3]int{3, 2, 1} + if m.tabCounts != want { + t.Fatalf("tabCounts = %v, want %v", m.tabCounts, want) + } +} + +func TestTabSwitchFiltersItems(t *testing.T) { + items := []skillItem{ + {entry: skillEntry{Name: "skill-a", RelPath: "skill-a", Kind: "skill"}}, + {entry: skillEntry{Name: "skill-b", RelPath: "skill-b", Kind: "skill"}}, + {entry: skillEntry{Name: "agent-a", RelPath: "agent-a.md", Kind: "agent"}}, + } + m := newListTUIModel(nil, items, len(items), "global", t.TempDir(), "", nil, kindAll) + + // Default All tab — should show 3 + if m.matchCount != 3 { + t.Fatalf("All tab matchCount = %d, want 3", m.matchCount) + } + + // Switch to Skills tab + m.activeTab = listTabSkills + m.applyFilter() + if m.matchCount != 2 { + t.Fatalf("Skills tab matchCount = %d, want 2", m.matchCount) + } + + // Switch to Agents tab + m.activeTab = listTabAgents + m.applyFilter() + if m.matchCount != 1 { + t.Fatalf("Agents tab matchCount = %d, want 1", m.matchCount) + } +} + +func TestInitialKindSetsTab(t *testing.T) { + m := newListTUIModel(nil, nil, 0, "global", t.TempDir(), "", nil, kindAgents) + if m.activeTab != listTabAgents { + t.Fatalf("initialKind=kindAgents → activeTab = %d, want %d", m.activeTab, listTabAgents) + } + + m2 := newListTUIModel(nil, nil, 0, "global", t.TempDir(), "", nil, kindSkills) + if m2.activeTab != listTabSkills { + t.Fatalf("initialKind=kindSkills → activeTab = %d, want %d", m2.activeTab, listTabSkills) + } +} + +func TestTabWithFilterComposition(t *testing.T) { + items := []skillItem{ + {entry: skillEntry{Name: "react", RelPath: "react", Kind: "skill"}}, + {entry: skillEntry{Name: "vue", RelPath: "vue", Kind: "skill"}}, + {entry: skillEntry{Name: "react-agent", RelPath: "react-agent.md", Kind: "agent"}}, + } + m := newListTUIModel(nil, items, len(items), "global", t.TempDir(), "", nil, kindAll) + + // Skills tab + text filter "react" → only skill "react" + m.activeTab = listTabSkills + m.filterText = "react" + m.applyFilter() + if m.matchCount != 1 { + t.Fatalf("Skills+react matchCount = %d, want 1", m.matchCount) + } + + // All tab + text filter "react" → skill "react" + agent "react-agent" + m.activeTab = listTabAll + m.applyFilter() + if m.matchCount != 2 { + t.Fatalf("All+react matchCount = %d, want 2", m.matchCount) + } +} + +func TestTabBarRendering(t *testing.T) { + items := []skillItem{ + {entry: skillEntry{Name: "s1", RelPath: "s1", Kind: "skill"}}, + {entry: skillEntry{Name: "a1", RelPath: "a1.md", Kind: "agent"}}, + } + m := newListTUIModel(nil, items, len(items), "global", t.TempDir(), "", nil, kindAll) + bar := xansi.Strip(m.renderTabBar()) + for _, want := range []string{"All(2)", "Skills(1)", "Agents(1)"} { + if !strings.Contains(bar, want) { + t.Fatalf("tab bar missing %q in %q", want, bar) + } + } +} + +func TestUpdateTitle(t *testing.T) { + m := newListTUIModel(nil, nil, 0, "global", t.TempDir(), "", nil, kindAll) + if !strings.Contains(m.list.Title, "resources") { + t.Fatalf("All tab title = %q, want 'resources'", m.list.Title) + } + + m.activeTab = listTabSkills + m.updateTitle() + if !strings.Contains(m.list.Title, "skills") { + t.Fatalf("Skills tab title = %q, want 'skills'", m.list.Title) + } + + m.activeTab = listTabAgents + m.updateTitle() + if !strings.Contains(m.list.Title, "agents") { + t.Fatalf("Agents tab title = %q, want 'agents'", m.list.Title) + } +} diff --git a/cmd/skillshare/sync.go b/cmd/skillshare/sync.go index 8daf1594..8849f330 100644 --- a/cmd/skillshare/sync.go +++ b/cmd/skillshare/sync.go @@ -133,8 +133,8 @@ func cmdSync(args []string) error { stats.ProjectScope = true logSyncOp(config.ProjectConfigPath(cwd), stats, start, err) - // Append agent sync when kind=all - if kind == kindAll { + // Append agent sync when kind=all or --all + if kind == kindAll || hasAll { if agentErr := syncAgentsProject(cwd, dryRun, force, jsonOutput, start); agentErr != nil && err == nil { err = agentErr } @@ -285,8 +285,8 @@ func cmdSync(args []string) error { return syncOutputJSON(results, dryRun, start, ignoreStats, syncErr) } - // Agent sync when kind=all (after skill sync) - if kind == kindAll { + // Agent sync when kind=all or --all (after skill sync) + if kind == kindAll || hasAll { if _, agentErr := syncAgentsGlobal(cfg, dryRun, force, jsonOutput, start); agentErr != nil && syncErr == nil { syncErr = agentErr } @@ -802,7 +802,7 @@ func printSyncHelp() { Sync skills from source to all configured targets. Options: - --all Sync skills and extras + --all Sync skills, agents, and extras --dry-run, -n Preview changes without applying --force, -f Force sync (overwrite local changes) --json Output results as JSON diff --git a/cmd/skillshare/tui_colors.go b/cmd/skillshare/tui_colors.go index 590150e5..ed9be22f 100644 --- a/cmd/skillshare/tui_colors.go +++ b/cmd/skillshare/tui_colors.go @@ -35,8 +35,9 @@ var tc = struct { Border lipgloss.Style // panel borders — faint // Filter & help - Filter lipgloss.Style // filter prompt/cursor — cyan - Help lipgloss.Style // help bar — faint, left margin + Filter lipgloss.Style // filter prompt/cursor — cyan + Help lipgloss.Style // help bar — faint, left margin + HelpKey lipgloss.Style // help bar key highlight — dim cyan // List browser chrome ListRow lipgloss.Style @@ -78,8 +79,9 @@ var tc = struct { Separator: lipgloss.NewStyle().Faint(true), Border: lipgloss.NewStyle().Faint(true), - Filter: lipgloss.NewStyle().Foreground(lipgloss.Color("6")), - Help: lipgloss.NewStyle().MarginLeft(2).Faint(true), + Filter: lipgloss.NewStyle().Foreground(lipgloss.Color("6")), + Help: lipgloss.NewStyle().MarginLeft(2).Faint(true), + HelpKey: lipgloss.NewStyle().Foreground(lipgloss.Color("6")).Faint(true), ListRow: lipgloss.NewStyle().PaddingLeft(1), ListMeta: lipgloss.NewStyle().PaddingLeft(1).Faint(true), diff --git a/cmd/skillshare/tui_helpers.go b/cmd/skillshare/tui_helpers.go index 17ab4c3c..068d5c51 100644 --- a/cmd/skillshare/tui_helpers.go +++ b/cmd/skillshare/tui_helpers.go @@ -46,6 +46,31 @@ func appendScrollInfo(help, scrollInfo string) string { return help } +// formatHelpBar colorizes a help string like "Tab skills/agents ↑↓ navigate q quit". +// Each pair "key desc" is parsed: key gets HelpKey style (dim cyan), desc stays dim. +// Pairs are separated by two or more spaces. +func formatHelpBar(raw string) string { + // Split by double-space to get individual "key desc" pairs + pairs := strings.Split(raw, " ") + var parts []string + for _, pair := range pairs { + pair = strings.TrimSpace(pair) + if pair == "" { + continue + } + // Split first space: key + description + if idx := strings.IndexByte(pair, ' '); idx > 0 { + key := pair[:idx] + desc := pair[idx:] + parts = append(parts, tc.HelpKey.Render(key)+tc.Help.UnsetMarginLeft().Render(desc)) + } else { + // Single word (e.g. just a key) + parts = append(parts, tc.HelpKey.Render(pair)) + } + } + return " " + strings.Join(parts, " ") +} + // applyDetailScrollSplit applies scrolling and returns (visible content, scroll info). func applyDetailScrollSplit(content string, detailScroll, viewHeight int) (string, string) { lines := strings.Split(content, "\n") diff --git a/cmd/skillshare/uninstall_agents.go b/cmd/skillshare/uninstall_agents.go index 022a0f62..6bba5024 100644 --- a/cmd/skillshare/uninstall_agents.go +++ b/cmd/skillshare/uninstall_agents.go @@ -22,39 +22,55 @@ func cmdUninstallAgents(agentsDir string, opts *uninstallOptions, cfgPath string return fmt.Errorf("cannot access agents source: %w", err) } - // Resolve agent names - var names []string + // Discover all agents for resolution + discovered, discErr := resource.AgentKind{}.Discover(agentsDir) + if discErr != nil { + return fmt.Errorf("failed to discover agents: %w", discErr) + } + + // Resolve targets + var targets []resource.DiscoveredResource if opts.all { - discovered, err := resource.AgentKind{}.Discover(agentsDir) - if err != nil { - return fmt.Errorf("failed to discover agents: %w", err) - } - for _, d := range discovered { - names = append(names, d.Name) - } - if len(names) == 0 { + targets = discovered + if len(targets) == 0 { ui.Info("No agents found") return nil } } else { - names = opts.skillNames + for _, input := range opts.skillNames { + found := false + for _, d := range discovered { + if d.Name == input || d.FlatName == input || d.RelPath == input || strings.TrimSuffix(d.RelPath, ".md") == input { + targets = append(targets, d) + found = true + break + } + } + if !found { + return fmt.Errorf("agent %q not found in %s", input, agentsDir) + } + } } - if len(names) == 0 { + if len(targets) == 0 { return fmt.Errorf("specify agent name(s) or --all") } - // Validate all agents exist before removing any - for _, name := range names { - agentFile := filepath.Join(agentsDir, name+".md") - if _, err := os.Stat(agentFile); err != nil { - return fmt.Errorf("agent %q not found in %s", name, agentsDir) - } - } - // Confirmation (unless --force or --json) if !opts.force && !opts.jsonOutput { - ui.Warning("This will remove %d agent(s): %s", len(names), strings.Join(names, ", ")) + ui.Warning("Uninstalling %d agent(s)", len(targets)) + const maxDisplay = 20 + if len(targets) <= maxDisplay { + for _, t := range targets { + fmt.Printf(" - %s\n", t.Name) + } + } else { + for _, t := range targets[:maxDisplay] { + fmt.Printf(" - %s\n", t.Name) + } + fmt.Printf(" ... and %d more\n", len(targets)-maxDisplay) + } + fmt.Println() fmt.Print("Continue? [y/N] ") var input string fmt.Scanln(&input) @@ -69,25 +85,26 @@ func cmdUninstallAgents(agentsDir string, opts *uninstallOptions, cfgPath string var removed []string var failed []string - for _, name := range names { - agentFile := filepath.Join(agentsDir, name+".md") - metaFile := filepath.Join(agentsDir, name+".skillshare-meta.json") + for _, t := range targets { + agentFile := filepath.Join(agentsDir, t.RelPath) + metaName := strings.TrimSuffix(filepath.Base(t.RelPath), ".md") + metaFile := filepath.Join(filepath.Dir(agentFile), metaName+".skillshare-meta.json") if opts.dryRun { - ui.Info("[dry-run] Would remove agent: %s", name) - removed = append(removed, name) + ui.Info("[dry-run] Would remove agent: %s", t.Name) + removed = append(removed, t.Name) continue } - _, err := trash.MoveAgentToTrash(agentFile, metaFile, name, trashBase) + _, err := trash.MoveAgentToTrash(agentFile, metaFile, t.Name, trashBase) if err != nil { - ui.Error("Failed to remove %s: %v", name, err) - failed = append(failed, name) + ui.Error("Failed to remove %s: %v", t.Name, err) + failed = append(failed, t.Name) continue } - ui.Success("Removed agent: %s", name) - removed = append(removed, name) + ui.Success("Removed agent: %s", t.Name) + removed = append(removed, t.Name) } // JSON output @@ -120,7 +137,7 @@ func cmdUninstallAgents(agentsDir string, opts *uninstallOptions, cfgPath string } // Oplog - logUninstallAgentOp(cfgPath, names, len(removed), len(failed), opts.dryRun, start) + logUninstallAgentOp(cfgPath, removed, len(removed), len(failed), opts.dryRun, start) if len(failed) > 0 { return fmt.Errorf("%d agent(s) failed to uninstall", len(failed)) diff --git a/internal/server/handler_install.go b/internal/server/handler_install.go index 226ec3d6..a1dde6cc 100644 --- a/internal/server/handler_install.go +++ b/internal/server/handler_install.go @@ -166,6 +166,9 @@ func (s *Server) handleInstallBatch(w http.ResponseWriter, r *http.Request) { if isAgent { // Agent install: copy single .md file to agents source agentsDir := s.agentsSource() + if body.Into != "" { + agentsDir = filepath.Join(agentsDir, body.Into) + } agentInfo := install.AgentInfo{ Name: sel.Name, Path: sel.Path, diff --git a/internal/server/handler_skills.go b/internal/server/handler_skills.go index 9ade98be..b29c441d 100644 --- a/internal/server/handler_skills.go +++ b/internal/server/handler_skills.go @@ -13,6 +13,7 @@ import ( "skillshare/internal/config" "skillshare/internal/git" "skillshare/internal/install" + "skillshare/internal/resource" "skillshare/internal/sync" "skillshare/internal/trash" "skillshare/internal/utils" @@ -90,26 +91,20 @@ func (s *Server) handleListSkills(w http.ResponseWriter, r *http.Request) { } } - // Agents + // Agents — recursive discovery (supports --into subdirectories) if (kindFilter == "" || kindFilter == "agent") && agentsSource != "" { - agentEntries, _ := os.ReadDir(agentsSource) - for _, e := range agentEntries { - if e.IsDir() || !strings.HasSuffix(strings.ToLower(e.Name()), ".md") { - continue - } - agentName := strings.TrimSuffix(e.Name(), ".md") - agentPath := filepath.Join(agentsSource, e.Name()) - + discovered, _ := resource.AgentKind{}.Discover(agentsSource) + for _, d := range discovered { item := skillItem{ - Name: agentName, + Name: d.Name, Kind: "agent", - FlatName: e.Name(), - RelPath: e.Name(), - SourcePath: agentPath, + FlatName: d.FlatName, + RelPath: d.RelPath, + SourcePath: d.SourcePath, } - // Check for agent metadata (agent meta is a standalone file, not inside a dir) - metaPath := filepath.Join(agentsSource, agentName+".skillshare-meta.json") + // Read sidecar metadata: .skillshare-meta.json + metaPath := filepath.Join(filepath.Dir(d.SourcePath), strings.TrimSuffix(filepath.Base(d.RelPath), ".md")+".skillshare-meta.json") if metaData, readErr := os.ReadFile(metaPath); readErr == nil { var meta install.SkillMeta if json.Unmarshal(metaData, &meta) == nil { @@ -204,22 +199,29 @@ func (s *Server) handleGetSkill(w http.ResponseWriter, r *http.Request) { return } - // Fallback: check agents source + // Fallback: check agents source (recursive — supports --into subdirectories) if agentsSource != "" { - agentName := strings.TrimSuffix(name, ".md") - agentFile := agentName + ".md" - agentPath := filepath.Join(agentsSource, agentFile) - if data, err := os.ReadFile(agentPath); err == nil { + agentDiscovered, _ := resource.AgentKind{}.Discover(agentsSource) + for _, d := range agentDiscovered { + if d.FlatName != name && d.Name != name { + continue + } + + data, readErr := os.ReadFile(d.SourcePath) + if readErr != nil { + continue + } + item := skillItem{ - Name: agentName, + Name: d.Name, Kind: "agent", - FlatName: agentFile, - RelPath: agentFile, - SourcePath: agentPath, + FlatName: d.FlatName, + RelPath: d.RelPath, + SourcePath: d.SourcePath, } - metaFilePath := filepath.Join(agentsSource, agentName+".skillshare-meta.json") - if metaData, readErr := os.ReadFile(metaFilePath); readErr == nil { + metaPath := filepath.Join(filepath.Dir(d.SourcePath), strings.TrimSuffix(filepath.Base(d.RelPath), ".md")+".skillshare-meta.json") + if metaData, metaReadErr := os.ReadFile(metaPath); metaReadErr == nil { var meta install.SkillMeta if json.Unmarshal(metaData, &meta) == nil { item.InstalledAt = meta.InstalledAt.Format(time.RFC3339) @@ -233,7 +235,7 @@ func (s *Server) handleGetSkill(w http.ResponseWriter, r *http.Request) { writeJSON(w, map[string]any{ "skill": item, "skillMdContent": string(data), - "files": []string{agentFile}, + "files": []string{filepath.Base(d.RelPath)}, }) return } diff --git a/internal/ui/pterm.go b/internal/ui/pterm.go index a489a290..ebf6c52d 100644 --- a/internal/ui/pterm.go +++ b/internal/ui/pterm.go @@ -587,6 +587,46 @@ type SyncStats struct { Duration time.Duration } +// AgentSyncStats holds statistics for agent sync summary. +type AgentSyncStats struct { + Targets int + Linked int + Local int + Updated int + Pruned int + Duration time.Duration +} + +// AgentSyncSummary prints an agent sync summary line. +func AgentSyncSummary(stats AgentSyncStats) { + OperationSummary("Agent sync", stats.Duration, + Metric{Label: "targets", Count: stats.Targets, HighlightColor: pterm.Cyan}, + Metric{Label: "linked", Count: stats.Linked, HighlightColor: pterm.Green}, + Metric{Label: "local", Count: stats.Local, HighlightColor: pterm.Blue}, + Metric{Label: "updated", Count: stats.Updated, HighlightColor: pterm.Yellow}, + Metric{Label: "pruned", Count: stats.Pruned, HighlightColor: pterm.Yellow}, + ) +} + +// ExtrasSyncStats holds statistics for extras sync summary. +type ExtrasSyncStats struct { + Targets int + Synced int + Skipped int + Pruned int + Duration time.Duration +} + +// ExtrasSyncSummary prints an extras sync summary line. +func ExtrasSyncSummary(stats ExtrasSyncStats) { + OperationSummary("Extras sync", stats.Duration, + Metric{Label: "targets", Count: stats.Targets, HighlightColor: pterm.Cyan}, + Metric{Label: "synced", Count: stats.Synced, HighlightColor: pterm.Green}, + Metric{Label: "skipped", Count: stats.Skipped, HighlightColor: pterm.Yellow}, + Metric{Label: "pruned", Count: stats.Pruned, HighlightColor: pterm.Yellow}, + ) +} + // UpdateSummary prints an update summary line matching SyncSummary style. func UpdateSummary(stats UpdateStats) { OperationSummary("Update", stats.Duration, From 3241159b56a5a01a56897fdf4dad08026b9a03b0 Mon Sep 17 00:00:00 2001 From: Willie Date: Tue, 7 Apr 2026 22:05:34 +0800 Subject: [PATCH 069/205] style(sync): unify summary format and headers across skills, agents, extras - Replace ui.Info() agent summary with ui.AgentSyncSummary() using OperationSummary pattern (colored metrics, consistent spacing) - Add ui.ExtrasSyncSummary() call to extras sync (was missing summary) - Unify section headers: 'Syncing X' / 'Syncing X (project)' pattern - Remove extra fmt.Println() before extras to fix double blank lines - Rename agentSyncStats.skipped to .local to match UI semantics --- cmd/skillshare/sync.go | 2 -- cmd/skillshare/sync_agents.go | 44 +++++++++++++++++---------- cmd/skillshare/sync_extras.go | 26 +++++++++++++--- tests/integration/extras_test.go | 4 +-- tests/integration/sync_extras_test.go | 6 ++-- 5 files changed, 55 insertions(+), 27 deletions(-) diff --git a/cmd/skillshare/sync.go b/cmd/skillshare/sync.go index 8849f330..03d1f6a5 100644 --- a/cmd/skillshare/sync.go +++ b/cmd/skillshare/sync.go @@ -122,7 +122,6 @@ func cmdSync(args []string) error { if hasAll && !jsonOutput { // Run project extras sync after project skills sync (text mode) defer func() { - fmt.Println() if extrasErr := cmdSyncExtras(append([]string{"-p"}, rest...)); extrasErr != nil { ui.Warning("Extras sync: %v", extrasErr) } @@ -293,7 +292,6 @@ func cmdSync(args []string) error { } if hasAll { - fmt.Println() if extrasErr := cmdSyncExtras(rest); extrasErr != nil { ui.Warning("Extras sync: %v", extrasErr) } diff --git a/cmd/skillshare/sync_agents.go b/cmd/skillshare/sync_agents.go index 8194d979..84cec5ab 100644 --- a/cmd/skillshare/sync_agents.go +++ b/cmd/skillshare/sync_agents.go @@ -16,7 +16,7 @@ import ( // agentSyncStats aggregates per-target agent sync results. type agentSyncStats struct { - linked, skipped, updated, pruned int + linked, local, updated, pruned int } // syncAgentsGlobal discovers agents and syncs them to all agent-capable targets. @@ -60,6 +60,7 @@ func syncAgentsGlobal(cfg *config.Config, dryRun, force, jsonOutput bool, start var totals agentSyncStats var syncErr error var skippedTargets []string + var targetCount int for name := range cfg.Targets { agentPath := resolveAgentTargetPath(cfg.Targets[name], builtinAgents, name) @@ -67,6 +68,7 @@ func syncAgentsGlobal(cfg *config.Config, dryRun, force, jsonOutput bool, start skippedTargets = append(skippedTargets, name) continue } + targetCount++ tc := cfg.Targets[name] ac := tc.AgentsConfig() @@ -75,16 +77,20 @@ func syncAgentsGlobal(cfg *config.Config, dryRun, force, jsonOutput bool, start syncErr = fmt.Errorf("some agent targets failed to sync") } totals.linked += stats.linked - totals.skipped += stats.skipped + totals.local += stats.local totals.updated += stats.updated totals.pruned += stats.pruned } if !jsonOutput { - fmt.Println() - ui.Info("Agent sync: %d linked, %d local, %d updated, %d pruned (%s)", - totals.linked, totals.skipped, totals.updated, totals.pruned, - formatDuration(start)) + ui.AgentSyncSummary(ui.AgentSyncStats{ + Targets: targetCount, + Linked: totals.linked, + Local: totals.local, + Updated: totals.updated, + Pruned: totals.pruned, + Duration: time.Since(start), + }) if len(skippedTargets) > 0 { sort.Strings(skippedTargets) ui.Warning("%d target(s) skipped for agents (no agents path): %s", @@ -135,7 +141,7 @@ func syncAgentsProject(projectRoot string, dryRun, force, jsonOutput bool, start } if !jsonOutput { - ui.Header("Syncing project agents") + ui.Header("Syncing agents (project)") if dryRun { ui.Warning("Dry run mode - no changes will be made") } @@ -145,6 +151,7 @@ func syncAgentsProject(projectRoot string, dryRun, force, jsonOutput bool, start var totals agentSyncStats var syncErr error var skippedTargets []string + var targetCount int // Load project config for target list projCfg, loadErr := config.LoadProject(projectRoot) @@ -158,6 +165,7 @@ func syncAgentsProject(projectRoot string, dryRun, force, jsonOutput bool, start skippedTargets = append(skippedTargets, entry.Name) continue } + targetCount++ ac := entry.AgentsConfig() stats, targetErr := syncAgentTarget(entry.Name, agentPath, ac.Mode, agents, agentsSource, dryRun, force, jsonOutput) @@ -165,16 +173,20 @@ func syncAgentsProject(projectRoot string, dryRun, force, jsonOutput bool, start syncErr = fmt.Errorf("some agent targets failed to sync") } totals.linked += stats.linked - totals.skipped += stats.skipped + totals.local += stats.local totals.updated += stats.updated totals.pruned += stats.pruned } if !jsonOutput { - fmt.Println() - ui.Info("Project agent sync: %d linked, %d local, %d updated, %d pruned (%s)", - totals.linked, totals.skipped, totals.updated, totals.pruned, - formatDuration(start)) + ui.AgentSyncSummary(ui.AgentSyncStats{ + Targets: targetCount, + Linked: totals.linked, + Local: totals.local, + Updated: totals.updated, + Pruned: totals.pruned, + Duration: time.Since(start), + }) if len(skippedTargets) > 0 { sort.Strings(skippedTargets) ui.Warning("%d target(s) skipped for agents (no agents path): %s", @@ -211,7 +223,7 @@ func syncAgentTarget(name, agentPath, modeOverride string, agents []resource.Dis stats := agentSyncStats{ linked: len(result.Linked), - skipped: len(result.Skipped), + local: len(result.Skipped), updated: len(result.Updated), pruned: len(pruned), } @@ -227,9 +239,9 @@ func syncAgentTarget(name, agentPath, modeOverride string, agents []resource.Dis func reportAgentSyncResult(name, mode string, stats agentSyncStats, dryRun bool) { if stats.linked > 0 || stats.updated > 0 || stats.pruned > 0 { ui.Success("%s: agents %s (%d linked, %d local, %d updated, %d pruned)", - name, mode, stats.linked, stats.skipped, stats.updated, stats.pruned) - } else if stats.skipped > 0 { - ui.Success("%s: agents %s (%d local preserved)", name, mode, stats.skipped) + name, mode, stats.linked, stats.local, stats.updated, stats.pruned) + } else if stats.local > 0 { + ui.Success("%s: agents %s (%d local preserved)", name, mode, stats.local) } else { ui.Success("%s: agents %s (up to date)", name, mode) } diff --git a/cmd/skillshare/sync_extras.go b/cmd/skillshare/sync_extras.go index 521897e3..8ac9cb02 100644 --- a/cmd/skillshare/sync_extras.go +++ b/cmd/skillshare/sync_extras.go @@ -99,11 +99,11 @@ func cmdSyncExtrasGlobal(dryRun, force, jsonOutput bool, start time.Time) error ui.Warning("Dry run mode - no changes will be made") } - var totalSynced, totalSkipped, totalPruned, totalErrors int + var totalSynced, totalSkipped, totalPruned, totalErrors, totalTargets int var jsonEntries []syncExtrasJSONEntry if !jsonOutput { - ui.Header(ui.WithModeLabel("Sync Extras")) + ui.Header(ui.WithModeLabel("Syncing extras")) } for _, extra := range cfg.Extras { @@ -128,6 +128,7 @@ func cmdSyncExtrasGlobal(dryRun, force, jsonOutput bool, start time.Time) error jsonEntry := syncExtrasJSONEntry{Name: extra.Name} for _, target := range extra.Targets { + totalTargets++ mode := target.Mode if mode == "" { mode = "merge" @@ -217,6 +218,14 @@ func cmdSyncExtrasGlobal(dryRun, force, jsonOutput bool, start time.Time) error return writeJSON(&output) } + ui.ExtrasSyncSummary(ui.ExtrasSyncStats{ + Targets: totalTargets, + Synced: totalSynced, + Skipped: totalSkipped, + Pruned: totalPruned, + Duration: time.Since(start), + }) + if totalErrors > 0 { return fmt.Errorf("%d extras sync error(s)", totalErrors) } @@ -245,11 +254,11 @@ func cmdSyncExtrasProject(cwd string, dryRun, force, jsonOutput bool, start time ui.Warning("Dry run mode - no changes will be made") } - var totalSynced, totalSkipped, totalPruned, totalErrors int + var totalSynced, totalSkipped, totalPruned, totalErrors, totalTargets int var jsonEntries []syncExtrasJSONEntry if !jsonOutput { - ui.Header(ui.WithModeLabel("Sync Extras")) + ui.Header(ui.WithModeLabel("Syncing extras")) } for _, extra := range projCfg.Extras { @@ -269,6 +278,7 @@ func cmdSyncExtrasProject(cwd string, dryRun, force, jsonOutput bool, start time jsonEntry := syncExtrasJSONEntry{Name: extra.Name} for _, target := range extra.Targets { + totalTargets++ mode := target.Mode if mode == "" { mode = "merge" @@ -363,6 +373,14 @@ func cmdSyncExtrasProject(cwd string, dryRun, force, jsonOutput bool, start time return writeJSON(&output) } + ui.ExtrasSyncSummary(ui.ExtrasSyncStats{ + Targets: totalTargets, + Synced: totalSynced, + Skipped: totalSkipped, + Pruned: totalPruned, + Duration: time.Since(start), + }) + if totalErrors > 0 { return fmt.Errorf("%d extras sync error(s)", totalErrors) } diff --git a/tests/integration/extras_test.go b/tests/integration/extras_test.go index ffda8054..d522ba13 100644 --- a/tests/integration/extras_test.go +++ b/tests/integration/extras_test.go @@ -208,8 +208,8 @@ extras: result := sb.RunCLI("sync", "extras", "-g") result.AssertSuccess(t) - // Header should show "Sync Extras" - result.AssertAnyOutputContains(t, "Sync Extras") + // Header should show "Syncing extras" + result.AssertAnyOutputContains(t, "Syncing extras") // Sync verb or file count should appear result.AssertAnyOutputContains(t, "synced") diff --git a/tests/integration/sync_extras_test.go b/tests/integration/sync_extras_test.go index daef3046..44783296 100644 --- a/tests/integration/sync_extras_test.go +++ b/tests/integration/sync_extras_test.go @@ -45,7 +45,7 @@ extras: result := sb.RunCLI("sync", "extras") result.AssertSuccess(t) - result.AssertAnyOutputContains(t, "Sync Extras") + result.AssertAnyOutputContains(t, "Syncing extras") result.AssertAnyOutputContains(t, "2 files") // Verify files are symlinks @@ -99,7 +99,7 @@ extras: result := sb.RunCLI("sync", "extras") result.AssertSuccess(t) - result.AssertAnyOutputContains(t, "Sync Extras") + result.AssertAnyOutputContains(t, "Syncing extras") // Verify file exists and is a real copy (not a symlink) copiedFile := filepath.Join(rulesTarget, "coding.md") @@ -226,7 +226,7 @@ extras: result.AssertAnyOutputContains(t, "merged") // Verify extras sync happened - result.AssertAnyOutputContains(t, "Sync Extras") + result.AssertAnyOutputContains(t, "Syncing extras") // Verify skill symlink if !sb.IsSymlink(filepath.Join(targetPath, "my-skill")) { From 34c275a2b84530b81cf15c9e8984e319b4a978a8 Mon Sep 17 00:00:00 2001 From: Willie Date: Tue, 7 Apr 2026 22:09:51 +0800 Subject: [PATCH 070/205] fix(sync): remove extra indent between checkmark and path in extras output --- cmd/skillshare/sync_extras.go | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/cmd/skillshare/sync_extras.go b/cmd/skillshare/sync_extras.go index 8ac9cb02..773a3341 100644 --- a/cmd/skillshare/sync_extras.go +++ b/cmd/skillshare/sync_extras.go @@ -144,7 +144,7 @@ func cmdSyncExtrasGlobal(dryRun, force, jsonOutput bool, start time.Time) error if syncErr != nil { if !jsonOutput { - ui.Warning(" %s: %v", shortTarget, syncErr) + ui.Warning("%s: %v", shortTarget, syncErr) } jsonTarget.Error = syncErr.Error() jsonEntry.Targets = append(jsonEntry.Targets, jsonTarget) @@ -174,11 +174,11 @@ func cmdSyncExtrasGlobal(dryRun, force, jsonOutput bool, start time.Time) error if result.Pruned > 0 { parts = append(parts, fmt.Sprintf("%d pruned", result.Pruned)) } - ui.Success(" %s %s (%s)", shortTarget, strings.Join(parts, ", "), mode) + ui.Success("%s %s (%s)", shortTarget, strings.Join(parts, ", "), mode) } else if result.Skipped > 0 { - ui.Warning(" %s %d files skipped (use --force to override)", shortTarget, result.Skipped) + ui.Warning("%s %d files skipped (use --force to override)", shortTarget, result.Skipped) } else { - ui.Success(" %s up to date (%s)", shortTarget, mode) + ui.Success("%s up to date (%s)", shortTarget, mode) } for _, e := range result.Errors { @@ -300,7 +300,7 @@ func cmdSyncExtrasProject(cwd string, dryRun, force, jsonOutput bool, start time if syncErr != nil { if !jsonOutput { - ui.Warning(" %s: %v", shortTarget, syncErr) + ui.Warning("%s: %v", shortTarget, syncErr) } jsonTarget.Error = syncErr.Error() jsonEntry.Targets = append(jsonEntry.Targets, jsonTarget) @@ -329,11 +329,11 @@ func cmdSyncExtrasProject(cwd string, dryRun, force, jsonOutput bool, start time if result.Pruned > 0 { parts = append(parts, fmt.Sprintf("%d pruned", result.Pruned)) } - ui.Success(" %s %s (%s)", shortTarget, strings.Join(parts, ", "), mode) + ui.Success("%s %s (%s)", shortTarget, strings.Join(parts, ", "), mode) } else if result.Skipped > 0 { - ui.Warning(" %s %d files skipped (use --force to override)", shortTarget, result.Skipped) + ui.Warning("%s %d files skipped (use --force to override)", shortTarget, result.Skipped) } else { - ui.Success(" %s up to date (%s)", shortTarget, mode) + ui.Success("%s up to date (%s)", shortTarget, mode) } for _, e := range result.Errors { From 49d33938a2027abbddb7249fbf9948a68127997b Mon Sep 17 00:00:00 2001 From: Willie Date: Tue, 7 Apr 2026 22:35:59 +0800 Subject: [PATCH 071/205] feat(sync): skip extras 'agents' targets that overlap with agents sync MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit When extras has an entry named 'agents' whose target paths overlap with the resolved agent target paths from targets.yaml, the extras targets are skipped with a warning — the agents sync system takes priority. Detection uses dual conditions: extras name == 'agents' AND resolved path overlap. Non-'agents' extras targeting agent paths are unaffected. Agents source must exist with actual files; empty/missing source means no overlap (extras sync normally). Also fixes: - CheckAgents now recurses into subdirectories via resource.AgentKind - update agents resolves nested agent names via filepath.Base - filterAgentCheckResults matches both full path and basename --- cmd/skillshare/sync.go | 6 +- cmd/skillshare/sync_agents.go | 58 +++++++ cmd/skillshare/sync_extras.go | 78 ++++++++- cmd/skillshare/update_agents.go | 5 +- internal/check/agent_check.go | 34 ++-- internal/check/agent_check_test.go | 32 ++++ tests/integration/sync_extras_test.go | 229 ++++++++++++++++++++++++++ 7 files changed, 412 insertions(+), 30 deletions(-) diff --git a/cmd/skillshare/sync.go b/cmd/skillshare/sync.go index 03d1f6a5..6fded2e5 100644 --- a/cmd/skillshare/sync.go +++ b/cmd/skillshare/sync.go @@ -143,9 +143,10 @@ func cmdSync(args []string) error { if hasAll { projCfg, loadErr := config.LoadProject(cwd) if loadErr == nil && len(projCfg.Extras) > 0 { + agentPaths := collectAgentTargetPathsProject(cwd) extrasEntries := runExtrasSyncEntries(projCfg.Extras, func(extra config.ExtraConfig) string { return config.ExtrasSourceDirProject(cwd, extra.Name) - }, dryRun, force, cwd) + }, dryRun, force, cwd, agentPaths) return syncOutputJSON(results, dryRun, start, projIgnoreStats, err, extrasEntries) } } @@ -276,9 +277,10 @@ func cmdSync(args []string) error { if jsonOutput { if hasAll && len(cfg.Extras) > 0 { + agentPaths := collectAgentTargetPathsGlobal(cfg) extrasEntries := runExtrasSyncEntries(cfg.Extras, func(extra config.ExtraConfig) string { return config.ResolveExtrasSourceDir(extra, cfg.ExtrasSource, cfg.Source) - }, dryRun, force, "") + }, dryRun, force, "", agentPaths) return syncOutputJSON(results, dryRun, start, ignoreStats, syncErr, extrasEntries) } return syncOutputJSON(results, dryRun, start, ignoreStats, syncErr) diff --git a/cmd/skillshare/sync_agents.go b/cmd/skillshare/sync_agents.go index 84cec5ab..4dc50f06 100644 --- a/cmd/skillshare/sync_agents.go +++ b/cmd/skillshare/sync_agents.go @@ -246,3 +246,61 @@ func reportAgentSyncResult(name, mode string, stats agentSyncStats, dryRun bool) ui.Success("%s: agents %s (up to date)", name, mode) } } + +// collectAgentTargetPathsGlobal returns the set of resolved agent target paths +// for all targets in the global config. Returns nil when agents source does not +// exist or contains no agent files (meaning no real agent sync would happen). +func collectAgentTargetPathsGlobal(cfg *config.Config) map[string]bool { + agentsSource := cfg.EffectiveAgentsSource() + if _, err := os.Stat(agentsSource); err != nil { + return nil + } + agents, err := resource.AgentKind{}.Discover(agentsSource) + if err != nil || len(agents) == 0 { + return nil + } + + builtinAgents := config.DefaultAgentTargets() + paths := make(map[string]bool) + for name := range cfg.Targets { + agentPath := resolveAgentTargetPath(cfg.Targets[name], builtinAgents, name) + if agentPath != "" { + paths[filepath.Clean(agentPath)] = true + } + } + if len(paths) == 0 { + return nil + } + return paths +} + +// collectAgentTargetPathsProject returns the set of resolved agent target paths +// for all targets in the project config. Returns nil when no agents exist. +func collectAgentTargetPathsProject(projectRoot string) map[string]bool { + agentsSource := filepath.Join(projectRoot, ".skillshare", "agents") + if _, err := os.Stat(agentsSource); err != nil { + return nil + } + agents, err := resource.AgentKind{}.Discover(agentsSource) + if err != nil || len(agents) == 0 { + return nil + } + + projCfg, err := config.LoadProject(projectRoot) + if err != nil { + return nil + } + + builtinAgents := config.ProjectAgentTargets() + paths := make(map[string]bool) + for _, entry := range projCfg.Targets { + agentPath := resolveProjectAgentTargetPath(entry, builtinAgents, projectRoot) + if agentPath != "" { + paths[filepath.Clean(agentPath)] = true + } + } + if len(paths) == 0 { + return nil + } + return paths +} diff --git a/cmd/skillshare/sync_extras.go b/cmd/skillshare/sync_extras.go index 773a3341..d41f64cc 100644 --- a/cmd/skillshare/sync_extras.go +++ b/cmd/skillshare/sync_extras.go @@ -13,6 +13,9 @@ import ( "skillshare/internal/ui" ) +// extrasAgentsName is the extras entry name that may overlap with the agents sync system. +const extrasAgentsName = "agents" + type syncExtrasJSONOutput struct { Extras []syncExtrasJSONEntry `json:"extras"` Duration string `json:"duration"` @@ -24,13 +27,14 @@ type syncExtrasJSONEntry struct { } type syncExtrasJSONTarget struct { - Path string `json:"path"` - Mode string `json:"mode"` - Synced int `json:"synced"` - Skipped int `json:"skipped"` - Pruned int `json:"pruned"` - Error string `json:"error,omitempty"` - Warnings []string `json:"warnings,omitempty"` + Path string `json:"path"` + Mode string `json:"mode"` + Synced int `json:"synced"` + Skipped int `json:"skipped"` + Pruned int `json:"pruned"` + Error string `json:"error,omitempty"` + Warnings []string `json:"warnings,omitempty"` + SkippedBy string `json:"skipped_by,omitempty"` } func cmdSyncExtras(args []string) error { @@ -99,6 +103,15 @@ func cmdSyncExtrasGlobal(dryRun, force, jsonOutput bool, start time.Time) error ui.Warning("Dry run mode - no changes will be made") } + // Detect overlap between extras "agents" and the agents sync system + var agentTargetPaths map[string]bool + for _, extra := range cfg.Extras { + if extra.Name == extrasAgentsName { + agentTargetPaths = collectAgentTargetPathsGlobal(cfg) + break + } + } + var totalSynced, totalSkipped, totalPruned, totalErrors, totalTargets int var jsonEntries []syncExtrasJSONEntry @@ -134,6 +147,18 @@ func cmdSyncExtrasGlobal(dryRun, force, jsonOutput bool, start time.Time) error mode = "merge" } targetPath := config.ExpandPath(target.Path) + + // Skip extras "agents" targets that overlap with the agents sync system + if extra.Name == extrasAgentsName && isExtrasTargetOverlappingAgents(targetPath, agentTargetPaths) { + if !jsonOutput { + ui.Warning("Skipping extras %q target %s — already managed by agents sync", extra.Name, shortenPath(targetPath)) + } + jsonEntry.Targets = append(jsonEntry.Targets, syncExtrasJSONTarget{ + Path: target.Path, Mode: mode, SkippedBy: extrasAgentsName, + }) + continue + } + result, syncErr := sync.SyncExtra(extraSource, targetPath, mode, dryRun, force, target.Flatten, "") shortTarget := shortenPath(targetPath) @@ -254,6 +279,15 @@ func cmdSyncExtrasProject(cwd string, dryRun, force, jsonOutput bool, start time ui.Warning("Dry run mode - no changes will be made") } + // Detect overlap between extras "agents" and the agents sync system + var agentTargetPaths map[string]bool + for _, extra := range projCfg.Extras { + if extra.Name == extrasAgentsName { + agentTargetPaths = collectAgentTargetPathsProject(cwd) + break + } + } + var totalSynced, totalSkipped, totalPruned, totalErrors, totalTargets int var jsonEntries []syncExtrasJSONEntry @@ -290,6 +324,17 @@ func cmdSyncExtrasProject(cwd string, dryRun, force, jsonOutput bool, start time targetPath = filepath.Join(cwd, targetPath) } + // Skip extras "agents" targets that overlap with the agents sync system + if extra.Name == extrasAgentsName && isExtrasTargetOverlappingAgents(targetPath, agentTargetPaths) { + if !jsonOutput { + ui.Warning("Skipping extras %q target %s — already managed by agents sync", extra.Name, shortenPath(targetPath)) + } + jsonEntry.Targets = append(jsonEntry.Targets, syncExtrasJSONTarget{ + Path: target.Path, Mode: mode, SkippedBy: extrasAgentsName, + }) + continue + } + result, syncErr := sync.SyncExtra(extraSource, targetPath, mode, dryRun, force, target.Flatten, cwd) shortTarget := shortenPath(targetPath) @@ -401,7 +446,8 @@ func syncVerb(mode string) string { // runExtrasSync runs extras sync and returns JSON entries without printing. // Used by sync --all --json to merge extras into the skills JSON output. -func runExtrasSyncEntries(extras []config.ExtraConfig, sourceFunc func(config.ExtraConfig) string, dryRun, force bool, projectRoot string) []syncExtrasJSONEntry { +// agentTargetPaths is used to skip extras "agents" targets that overlap with the agents sync system. +func runExtrasSyncEntries(extras []config.ExtraConfig, sourceFunc func(config.ExtraConfig) string, dryRun, force bool, projectRoot string, agentTargetPaths map[string]bool) []syncExtrasJSONEntry { entries := make([]syncExtrasJSONEntry, 0, len(extras)) for _, extra := range extras { extraSource := sourceFunc(extra) @@ -420,6 +466,13 @@ func runExtrasSyncEntries(extras []config.ExtraConfig, sourceFunc func(config.Ex } targetPath := config.ExpandPath(target.Path) + if extra.Name == extrasAgentsName && isExtrasTargetOverlappingAgents(targetPath, agentTargetPaths) { + entry.Targets = append(entry.Targets, syncExtrasJSONTarget{ + Path: targetPath, Mode: mode, SkippedBy: extrasAgentsName, + }) + continue + } + result, syncErr := sync.SyncExtra(extraSource, targetPath, mode, dryRun, force, target.Flatten, projectRoot) jt := syncExtrasJSONTarget{Path: targetPath, Mode: mode} if syncErr != nil { @@ -441,6 +494,15 @@ func runExtrasSyncEntries(extras []config.ExtraConfig, sourceFunc func(config.Ex return entries } +// isExtrasTargetOverlappingAgents checks whether an extras target path overlaps +// with any active agent target path. +func isExtrasTargetOverlappingAgents(targetPath string, agentPaths map[string]bool) bool { + if len(agentPaths) == 0 { + return false + } + return agentPaths[filepath.Clean(targetPath)] +} + // cachedHome caches the home directory for shortenPath. var cachedHome = func() string { h, _ := os.UserHomeDir() diff --git a/cmd/skillshare/update_agents.go b/cmd/skillshare/update_agents.go index 195d6370..54219ef1 100644 --- a/cmd/skillshare/update_agents.go +++ b/cmd/skillshare/update_agents.go @@ -166,7 +166,7 @@ func reinstallAgent(agentsDir string, r check.AgentCheckResult) error { installOpts := install.InstallOptions{ Kind: "agent", - AgentNames: []string{r.Name}, + AgentNames: []string{filepath.Base(r.Name)}, Force: true, Update: true, } @@ -222,7 +222,8 @@ func filterAgentCheckResults(results []check.AgentCheckResult, names []string) [ } var filtered []check.AgentCheckResult for _, r := range results { - if nameSet[r.Name] { + // Match full path (e.g. "demo/code-reviewer") or basename (e.g. "code-reviewer") + if nameSet[r.Name] || nameSet[filepath.Base(r.Name)] { filtered = append(filtered, r) } } diff --git a/internal/check/agent_check.go b/internal/check/agent_check.go index e3d67bfe..be54662e 100644 --- a/internal/check/agent_check.go +++ b/internal/check/agent_check.go @@ -4,9 +4,9 @@ import ( "encoding/json" "os" "path/filepath" - "strings" "skillshare/internal/install" + "skillshare/internal/resource" "skillshare/internal/utils" ) @@ -22,33 +22,32 @@ type AgentCheckResult struct { // CheckAgents scans the agents source directory for installed agents and // compares their file hashes against metadata to detect drift. +// Uses resource.AgentKind{}.Discover() to recurse into subdirectories. func CheckAgents(agentsDir string) []AgentCheckResult { - entries, err := os.ReadDir(agentsDir) + discovered, err := resource.AgentKind{}.Discover(agentsDir) if err != nil { return nil } var results []AgentCheckResult - - for _, entry := range entries { - name := entry.Name() - - // Agent .md files - if !entry.IsDir() && strings.HasSuffix(strings.ToLower(name), ".md") { - agentName := strings.TrimSuffix(name, ".md") - result := checkOneAgent(agentsDir, agentName, name) - results = append(results, result) - } + for _, d := range discovered { + result := checkOneAgent(d.SourcePath, d.RelPath) + results = append(results, result) } return results } -func checkOneAgent(agentsDir, agentName, fileName string) AgentCheckResult { - result := AgentCheckResult{Name: agentName} +// checkOneAgent checks a single agent file. sourcePath is the absolute path +// to the .md file; relPath is relative to the agents root (e.g. "demo/code-reviewer.md"). +func checkOneAgent(sourcePath, relPath string) AgentCheckResult { + fileName := filepath.Base(relPath) + agentName := fileName[:len(fileName)-len(".md")] + result := AgentCheckResult{Name: relPath[:len(relPath)-len(".md")]} - // Look for metadata file: .skillshare-meta.json - metaPath := filepath.Join(agentsDir, agentName+".skillshare-meta.json") + // Look for sidecar metadata: .skillshare-meta.json alongside the .md file + dir := filepath.Dir(sourcePath) + metaPath := filepath.Join(dir, agentName+".skillshare-meta.json") metaData, err := os.ReadFile(metaPath) if err != nil { result.Status = "local" @@ -67,13 +66,12 @@ func checkOneAgent(agentsDir, agentName, fileName string) AgentCheckResult { result.RepoURL = meta.RepoURL // Compare file hash - agentPath := filepath.Join(agentsDir, fileName) if meta.FileHashes == nil || meta.FileHashes[fileName] == "" { result.Status = "local" return result } - currentHash, err := utils.FileHashFormatted(agentPath) + currentHash, err := utils.FileHashFormatted(sourcePath) if err != nil { result.Status = "error" result.Message = "cannot hash file" diff --git a/internal/check/agent_check_test.go b/internal/check/agent_check_test.go index 2990d4e8..faffaffd 100644 --- a/internal/check/agent_check_test.go +++ b/internal/check/agent_check_test.go @@ -87,6 +87,38 @@ func TestCheckAgents_NonExistentDir(t *testing.T) { } } +func TestCheckAgents_Nested(t *testing.T) { + dir := t.TempDir() + subdir := filepath.Join(dir, "demo") + os.MkdirAll(subdir, 0755) + + agentFile := filepath.Join(subdir, "tutor.md") + os.WriteFile(agentFile, []byte("# Tutor"), 0644) + + hash, _ := utils.FileHashFormatted(agentFile) + meta := &install.SkillMeta{ + Source: "https://github.com/example/repo", + Kind: "agent", + FileHashes: map[string]string{"tutor.md": hash}, + } + metaData, _ := json.MarshalIndent(meta, "", " ") + os.WriteFile(filepath.Join(subdir, "tutor.skillshare-meta.json"), metaData, 0644) + + results := CheckAgents(dir) + if len(results) != 1 { + t.Fatalf("expected 1 result, got %d", len(results)) + } + if results[0].Name != "demo/tutor" { + t.Errorf("Name = %q, want %q", results[0].Name, "demo/tutor") + } + if results[0].Status != "up_to_date" { + t.Errorf("Status = %q, want %q", results[0].Status, "up_to_date") + } + if results[0].Source != "https://github.com/example/repo" { + t.Errorf("Source = %q, want non-empty", results[0].Source) + } +} + func TestCheckAgents_SkipsNonMd(t *testing.T) { dir := t.TempDir() os.WriteFile(filepath.Join(dir, "tutor.md"), []byte("# Tutor"), 0644) diff --git a/tests/integration/sync_extras_test.go b/tests/integration/sync_extras_test.go index 44783296..7ea59004 100644 --- a/tests/integration/sync_extras_test.go +++ b/tests/integration/sync_extras_test.go @@ -520,3 +520,232 @@ extras: t.Error("config should contain flatten: true") } } + +// --- Extras "agents" overlap with agents sync --- + +func TestSyncExtras_AgentsOverlap_Skipped(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + sb.CreateSkill("placeholder", map[string]string{ + "SKILL.md": "# Placeholder", + }) + targetPath := sb.CreateTarget("claude") + + // Create agents source with a real agent (the regular agents sync system) + agentsDir := filepath.Join(filepath.Dir(sb.SourcePath), "agents") + os.MkdirAll(agentsDir, 0755) + os.WriteFile(filepath.Join(agentsDir, "helper.md"), []byte("# Helper Agent"), 0644) + + // Create extras source for "agents" with a file + sourceRoot := filepath.Dir(sb.SourcePath) + extrasAgentsSource := filepath.Join(sourceRoot, "extras", "agents") + os.MkdirAll(extrasAgentsSource, 0755) + os.WriteFile(filepath.Join(extrasAgentsSource, "extra-agent.md"), []byte("# Extra Agent"), 0644) + + claudeAgents := filepath.Join(sb.Home, ".claude", "agents") + os.MkdirAll(claudeAgents, 0755) + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + skills: + path: ` + targetPath + ` + agents: + path: ` + claudeAgents + ` +extras: + - name: agents + targets: + - path: ` + claudeAgents + ` +`) + + result := sb.RunCLI("sync", "extras") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "Skipping extras") + result.AssertAnyOutputContains(t, "already managed by agents sync") +} + +func TestSyncExtras_AgentsOverlap_NoAgentsSource(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + sb.CreateSkill("placeholder", map[string]string{ + "SKILL.md": "# Placeholder", + }) + targetPath := sb.CreateTarget("claude") + + // NO agents source directory — extras "agents" should sync normally + sourceRoot := filepath.Dir(sb.SourcePath) + extrasAgentsSource := filepath.Join(sourceRoot, "extras", "agents") + os.MkdirAll(extrasAgentsSource, 0755) + os.WriteFile(filepath.Join(extrasAgentsSource, "extra-agent.md"), []byte("# Extra Agent"), 0644) + + claudeAgents := filepath.Join(sb.Home, ".claude", "agents") + os.MkdirAll(claudeAgents, 0755) + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + skills: + path: ` + targetPath + ` + agents: + path: ` + claudeAgents + ` +extras: + - name: agents + targets: + - path: ` + claudeAgents + ` +`) + + result := sb.RunCLI("sync", "extras") + result.AssertSuccess(t) + result.AssertOutputNotContains(t, "Skipping extras") + + // Extras agent file should be synced normally + if !sb.FileExists(filepath.Join(claudeAgents, "extra-agent.md")) { + t.Error("extra-agent.md should be synced when no agents source exists") + } +} + +func TestSyncExtras_AgentsOverlap_PartialSkip(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + sb.CreateSkill("placeholder", map[string]string{ + "SKILL.md": "# Placeholder", + }) + targetPath := sb.CreateTarget("claude") + + // Create agents source + agentsDir := filepath.Join(filepath.Dir(sb.SourcePath), "agents") + os.MkdirAll(agentsDir, 0755) + os.WriteFile(filepath.Join(agentsDir, "helper.md"), []byte("# Helper Agent"), 0644) + + // Create extras "agents" source + sourceRoot := filepath.Dir(sb.SourcePath) + extrasAgentsSource := filepath.Join(sourceRoot, "extras", "agents") + os.MkdirAll(extrasAgentsSource, 0755) + os.WriteFile(filepath.Join(extrasAgentsSource, "extra-agent.md"), []byte("# Extra Agent"), 0644) + + claudeAgents := filepath.Join(sb.Home, ".claude", "agents") + customTarget := filepath.Join(sb.Home, "custom-agents") + os.MkdirAll(claudeAgents, 0755) + os.MkdirAll(customTarget, 0755) + + // Two targets: one overlaps with agents, one doesn't + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + skills: + path: ` + targetPath + ` + agents: + path: ` + claudeAgents + ` +extras: + - name: agents + targets: + - path: ` + claudeAgents + ` + - path: ` + customTarget + ` +`) + + result := sb.RunCLI("sync", "extras") + result.AssertSuccess(t) + + // Overlapping target should be skipped + result.AssertAnyOutputContains(t, "Skipping extras") + + // Non-overlapping target should be synced + if !sb.FileExists(filepath.Join(customTarget, "extra-agent.md")) { + t.Error("extra-agent.md should be synced to non-overlapping target") + } +} + +func TestSyncExtras_AgentsOverlap_NonAgentsNameNotSkipped(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + sb.CreateSkill("placeholder", map[string]string{ + "SKILL.md": "# Placeholder", + }) + targetPath := sb.CreateTarget("claude") + + // Create agents source (real agents system active) + agentsDir := filepath.Join(filepath.Dir(sb.SourcePath), "agents") + os.MkdirAll(agentsDir, 0755) + os.WriteFile(filepath.Join(agentsDir, "helper.md"), []byte("# Helper Agent"), 0644) + + // Create extras "rules" that targets the agents directory + sourceRoot := filepath.Dir(sb.SourcePath) + rulesSource := filepath.Join(sourceRoot, "extras", "rules") + os.MkdirAll(rulesSource, 0755) + os.WriteFile(filepath.Join(rulesSource, "rule.md"), []byte("# Rule"), 0644) + + claudeAgents := filepath.Join(sb.Home, ".claude", "agents") + os.MkdirAll(claudeAgents, 0755) + + // extras "rules" targets the same path as agents — should NOT be skipped (name != "agents") + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + skills: + path: ` + targetPath + ` + agents: + path: ` + claudeAgents + ` +extras: + - name: rules + targets: + - path: ` + claudeAgents + ` +`) + + result := sb.RunCLI("sync", "extras") + result.AssertSuccess(t) + result.AssertOutputNotContains(t, "Skipping extras") + + if !sb.FileExists(filepath.Join(claudeAgents, "rule.md")) { + t.Error("rule.md should be synced — extras named 'rules' should not be affected by agents overlap") + } +} + +func TestSyncExtras_AgentsOverlap_NoTargetOverlap(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + sb.CreateSkill("placeholder", map[string]string{ + "SKILL.md": "# Placeholder", + }) + targetPath := sb.CreateTarget("claude") + + // Create agents source + agentsDir := filepath.Join(filepath.Dir(sb.SourcePath), "agents") + os.MkdirAll(agentsDir, 0755) + os.WriteFile(filepath.Join(agentsDir, "helper.md"), []byte("# Helper Agent"), 0644) + + // Create extras "agents" source targeting a DIFFERENT path + sourceRoot := filepath.Dir(sb.SourcePath) + extrasAgentsSource := filepath.Join(sourceRoot, "extras", "agents") + os.MkdirAll(extrasAgentsSource, 0755) + os.WriteFile(filepath.Join(extrasAgentsSource, "extra-agent.md"), []byte("# Extra Agent"), 0644) + + claudeAgents := filepath.Join(sb.Home, ".claude", "agents") + customTarget := filepath.Join(sb.Home, "my-agents") + os.MkdirAll(customTarget, 0755) + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + skills: + path: ` + targetPath + ` + agents: + path: ` + claudeAgents + ` +extras: + - name: agents + targets: + - path: ` + customTarget + ` +`) + + result := sb.RunCLI("sync", "extras") + result.AssertSuccess(t) + result.AssertOutputNotContains(t, "Skipping extras") + + if !sb.FileExists(filepath.Join(customTarget, "extra-agent.md")) { + t.Error("extra-agent.md should be synced to non-overlapping target") + } +} From ebcfaf6bc46940a94121297de1eb2c6d3a5c3322 Mon Sep 17 00:00:00 2001 From: Willie Date: Wed, 8 Apr 2026 02:37:42 +0800 Subject: [PATCH 072/205] feat(agents): add --group support, fix nested discovery, and improve TUI dispatch - CheckAgents uses recursive discovery via resource.AgentKind{}.Discover() so nested agents (e.g. demo/code-reviewer.md) are found correctly - Add --group/-G flag to update, check, and uninstall agents commands with shared validateAgentGroups/matchesAnyGroup helpers - Fix TUI project-mode dispatch: call cmdUpdateAgentsProject and cmdUninstallAgents directly instead of passing 'agents' keyword to handlers that don't understand it - TUI confirmation overlay shows correct command with 'agents' keyword and confirmKind field tracks skill vs agent - TUI toggleDisabled (E key) writes to .agentignore for agents instead of .skillignore - Agent Discover now marks .agentignore matches as Disabled=true instead of skipping them, so disabled agents appear in list with [disabled] tag - Sync filters out disabled agents via resource.ActiveAgents() - auditSkillByName uses scanPathTarget to support single-file agent audit - Uninstall agents display uses RelPath (e.g. demo/tutor) instead of frontmatter name (e.g. Python Tutor Expert) --- cmd/skillshare/audit.go | 8 ++- cmd/skillshare/check.go | 25 ++++++-- cmd/skillshare/list_project.go | 8 ++- cmd/skillshare/list_tui.go | 16 ++++- cmd/skillshare/sync_agents.go | 8 ++- cmd/skillshare/uninstall.go | 3 +- cmd/skillshare/uninstall_agents.go | 52 ++++++++++----- cmd/skillshare/update.go | 3 +- cmd/skillshare/update_agents.go | 95 ++++++++++++++++++++++++++-- internal/resource/agent.go | 18 ++++-- internal/resource/kind_test.go | 28 ++++++-- tests/integration/agent_crud_test.go | 6 +- 12 files changed, 222 insertions(+), 48 deletions(-) diff --git a/cmd/skillshare/audit.go b/cmd/skillshare/audit.go index ea4fd1bb..ef34c1e8 100644 --- a/cmd/skillshare/audit.go +++ b/cmd/skillshare/audit.go @@ -770,7 +770,7 @@ func auditSkillByName(sourcePath, name, mode, projectRoot, threshold, format, po } start := time.Now() - result, err := scanSkillPath(skillPath, projectRoot, reg) + result, err := scanPathTarget(skillPath, projectRoot, reg) if err != nil { return nil, summary, fmt.Errorf("scan error: %w", err) } @@ -786,7 +786,11 @@ func auditSkillByName(sourcePath, name, mode, projectRoot, threshold, format, po summary.Skill = name summary.Mode = mode if format == formatText { - subtitle := auditHeaderSubtitle(fmt.Sprintf("Scanning skill: %s", name), mode, sourcePath, threshold, policyLine) + label := "skill" + if strings.HasSuffix(strings.ToLower(name), ".md") { + label = "agent" + } + subtitle := auditHeaderSubtitle(fmt.Sprintf("Scanning %s: %s", label, name), mode, sourcePath, threshold, policyLine) summaryLines := buildAuditSummaryLines(summary) minWidth := auditHeaderMinWidth(subtitle) ui.HeaderBoxWithMinWidth(auditHeaderTitle(mode), subtitle, minWidth) diff --git a/cmd/skillshare/check.go b/cmd/skillshare/check.go index 24db9949..5d95e0aa 100644 --- a/cmd/skillshare/check.go +++ b/cmd/skillshare/check.go @@ -193,7 +193,7 @@ func cmdCheck(args []string) error { cfgPath = config.ProjectConfigPath(cwd) if kind == kindAgents { agentsDir := filepath.Join(cwd, ".skillshare", "agents") - renderAgentCheck(agentsDir, opts.json) + renderAgentCheck(agentsDir, opts.groups, opts.json) logCheckOp(cfgPath, 0, 0, 0, 0, scope, start, nil) return nil } @@ -210,7 +210,7 @@ func cmdCheck(args []string) error { // Agent-only check: scan agents source directory and skip repo checks. if kind == kindAgents { agentsDir := cfg.EffectiveAgentsSource() - renderAgentCheck(agentsDir, opts.json) + renderAgentCheck(agentsDir, opts.groups, opts.json) logCheckOp(cfgPath, 0, 0, 0, 0, scope, start, nil) return nil } @@ -910,8 +910,23 @@ func formatSourceShort(source string) string { } // renderAgentCheck runs CheckAgents and displays results (text or JSON). -func renderAgentCheck(agentsDir string, jsonMode bool) { +// If groups is non-empty, only agents in those group subdirectories are shown. +func renderAgentCheck(agentsDir string, groups []string, jsonMode bool) { agentResults := check.CheckAgents(agentsDir) + + if len(groups) > 0 { + filtered, err := filterAgentResultsByGroups(agentResults, groups, agentsDir) + if err != nil { + if jsonMode { + writeJSONError(err) //nolint:errcheck + return + } + ui.Error("%v", err) + return + } + agentResults = filtered + } + if jsonMode { out, _ := json.MarshalIndent(agentResults, "", " ") fmt.Println(string(out)) @@ -968,5 +983,7 @@ Examples: skillshare check --group frontend # Check all skills in frontend/ skillshare check x -G backend # Mix names and groups skillshare check --json # Output as JSON (for CI) - skillshare check -p # Check project skills`) + skillshare check -p # Check project skills + skillshare check agents # Check all agents + skillshare check agents -G demo # Check agents in demo/`) } diff --git a/cmd/skillshare/list_project.go b/cmd/skillshare/list_project.go index a8b9308b..e23c0cf9 100644 --- a/cmd/skillshare/list_project.go +++ b/cmd/skillshare/list_project.go @@ -3,6 +3,7 @@ package main import ( "fmt" "path/filepath" + "time" "skillshare/internal/config" "skillshare/internal/sync" @@ -69,14 +70,15 @@ func cmdListProject(root string, opts listOptions, kind resourceKindFilter) erro return cmdAudit([]string{"-p", skillName}) case "update": if skillKind == "agent" { - _, updateErr := cmdUpdateProject([]string{"agents", skillName}, root) - return updateErr + return cmdUpdateAgentsProject([]string{skillName}, root, time.Now()) } _, updateErr := cmdUpdateProject([]string{skillName}, root) return updateErr case "uninstall": if skillKind == "agent" { - return cmdUninstallProject([]string{"agents", "--force", skillName}, root) + agentsDir := filepath.Join(root, ".skillshare", "agents") + uOpts := &uninstallOptions{skillNames: []string{skillName}, force: true} + return cmdUninstallAgents(agentsDir, uOpts, config.ProjectConfigPath(root), time.Now()) } return cmdUninstallProject([]string{"--force", skillName}, root) } diff --git a/cmd/skillshare/list_tui.go b/cmd/skillshare/list_tui.go index 499c6039..ecb7480b 100644 --- a/cmd/skillshare/list_tui.go +++ b/cmd/skillshare/list_tui.go @@ -105,6 +105,7 @@ type listTUIModel struct { confirming bool // true when confirmation overlay is shown confirmAction string // "audit", "update", "uninstall" confirmSkill string // skill name for confirmation display + confirmKind string // "skill" or "agent" // Content viewer overlay — dual-pane: left tree + right content showContent bool @@ -361,6 +362,7 @@ func (m listTUIModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) { m.confirming = false m.confirmAction = "" m.confirmSkill = "" + m.confirmKind = "" return m, nil } return m, nil @@ -470,6 +472,7 @@ func (m listTUIModel) enterConfirm(action string) (tea.Model, tea.Cmd) { m.confirming = true m.confirmAction = action m.confirmSkill = name + m.confirmKind = item.entry.Kind return m, nil } @@ -481,7 +484,12 @@ func (m listTUIModel) toggleDisabled() (tea.Model, tea.Cmd) { return m, nil } - ignorePath := filepath.Join(m.sourcePath, ".skillignore") + var ignorePath string + if item.entry.Kind == "agent" { + ignorePath = filepath.Join(m.agentsSourcePath, ".agentignore") + } else { + ignorePath = filepath.Join(m.sourcePath, ".skillignore") + } pattern := item.entry.RelPath if pattern == "" { pattern = item.entry.Name @@ -552,7 +560,11 @@ func (m listTUIModel) View() string { if m.modeLabel == "project" { flag = "-p" } - cmd := fmt.Sprintf("skillshare %s %s %s", m.confirmAction, flag, m.confirmSkill) + kindArg := "" + if m.confirmKind == "agent" { + kindArg = "agents " + } + cmd := fmt.Sprintf("skillshare %s %s%s %s", m.confirmAction, kindArg, flag, m.confirmSkill) if m.confirmAction == "uninstall" { return fmt.Sprintf("\n %s\n\n → %s\n\n Proceed? [Y/n] ", tc.Red.Render("Uninstall "+m.confirmSkill+"?"), cmd) diff --git a/cmd/skillshare/sync_agents.go b/cmd/skillshare/sync_agents.go index 4dc50f06..7cae5611 100644 --- a/cmd/skillshare/sync_agents.go +++ b/cmd/skillshare/sync_agents.go @@ -35,11 +35,12 @@ func syncAgentsGlobal(cfg *config.Config, dryRun, force, jsonOutput bool, start return agentSyncStats{}, fmt.Errorf("cannot access agents source: %w", err) } - // Discover agents - agents, err := resource.AgentKind{}.Discover(agentsSource) + // Discover agents (excludes disabled from sync) + allAgents, err := resource.AgentKind{}.Discover(agentsSource) if err != nil { return agentSyncStats{}, fmt.Errorf("cannot discover agents: %w", err) } + agents := resource.ActiveAgents(allAgents) if len(agents) == 0 { if !jsonOutput { @@ -128,10 +129,11 @@ func syncAgentsProject(projectRoot string, dryRun, force, jsonOutput bool, start return fmt.Errorf("cannot access project agents: %w", err) } - agents, err := resource.AgentKind{}.Discover(agentsSource) + allAgents, err := resource.AgentKind{}.Discover(agentsSource) if err != nil { return fmt.Errorf("cannot discover project agents: %w", err) } + agents := resource.ActiveAgents(allAgents) if len(agents) == 0 { if !jsonOutput { diff --git a/cmd/skillshare/uninstall.go b/cmd/skillshare/uninstall.go index 2ba18b9f..506ff1b3 100644 --- a/cmd/skillshare/uninstall.go +++ b/cmd/skillshare/uninstall.go @@ -1211,5 +1211,6 @@ Examples: skillshare uninstall _team-repo # Remove tracked repository skillshare uninstall team-repo # _ prefix is optional skillshare uninstall agents tutor # Uninstall an agent - skillshare uninstall agents --all # Uninstall all agents`) + skillshare uninstall agents --all # Uninstall all agents + skillshare uninstall agents -G demo # Uninstall all agents in demo/`) } diff --git a/cmd/skillshare/uninstall_agents.go b/cmd/skillshare/uninstall_agents.go index 6bba5024..5108b3bb 100644 --- a/cmd/skillshare/uninstall_agents.go +++ b/cmd/skillshare/uninstall_agents.go @@ -50,24 +50,45 @@ func cmdUninstallAgents(agentsDir string, opts *uninstallOptions, cfgPath string return fmt.Errorf("agent %q not found in %s", input, agentsDir) } } + + // Resolve --group targets + if len(opts.groups) > 0 { + groupFiltered, err := filterDiscoveredAgentsByGroups(discovered, opts.groups, agentsDir) + if err != nil { + return err + } + if len(groupFiltered) == 0 { + return fmt.Errorf("no agents found in group(s): %s", strings.Join(opts.groups, ", ")) + } + // Deduplicate against already-resolved name targets + seen := make(map[string]bool, len(targets)) + for _, t := range targets { + seen[t.RelPath] = true + } + for _, d := range groupFiltered { + if !seen[d.RelPath] { + targets = append(targets, d) + } + } + } } if len(targets) == 0 { - return fmt.Errorf("specify agent name(s) or --all") + return fmt.Errorf("specify agent name(s), --group, or --all") } // Confirmation (unless --force or --json) if !opts.force && !opts.jsonOutput { ui.Warning("Uninstalling %d agent(s)", len(targets)) const maxDisplay = 20 - if len(targets) <= maxDisplay { - for _, t := range targets { - fmt.Printf(" - %s\n", t.Name) - } - } else { - for _, t := range targets[:maxDisplay] { - fmt.Printf(" - %s\n", t.Name) - } + display := targets + if len(display) > maxDisplay { + display = display[:maxDisplay] + } + for _, t := range display { + fmt.Printf(" - %s\n", strings.TrimSuffix(t.RelPath, ".md")) + } + if len(targets) > maxDisplay { fmt.Printf(" ... and %d more\n", len(targets)-maxDisplay) } fmt.Println() @@ -90,21 +111,22 @@ func cmdUninstallAgents(agentsDir string, opts *uninstallOptions, cfgPath string metaName := strings.TrimSuffix(filepath.Base(t.RelPath), ".md") metaFile := filepath.Join(filepath.Dir(agentFile), metaName+".skillshare-meta.json") + displayName := strings.TrimSuffix(t.RelPath, ".md") if opts.dryRun { - ui.Info("[dry-run] Would remove agent: %s", t.Name) - removed = append(removed, t.Name) + ui.Info("[dry-run] Would remove agent: %s", displayName) + removed = append(removed, displayName) continue } _, err := trash.MoveAgentToTrash(agentFile, metaFile, t.Name, trashBase) if err != nil { - ui.Error("Failed to remove %s: %v", t.Name, err) - failed = append(failed, t.Name) + ui.Error("Failed to remove %s: %v", displayName, err) + failed = append(failed, displayName) continue } - ui.Success("Removed agent: %s", t.Name) - removed = append(removed, t.Name) + ui.Success("Removed agent: %s", displayName) + removed = append(removed, displayName) } // JSON output diff --git a/cmd/skillshare/update.go b/cmd/skillshare/update.go index 704b2beb..b25d15de 100644 --- a/cmd/skillshare/update.go +++ b/cmd/skillshare/update.go @@ -547,6 +547,5 @@ Examples: skillshare update --all --prune # Update all + remove stale skills skillshare update agents --all # Update all agents skillshare update agents tutor # Update a single agent - -Note: --group is not supported for agents.`) + skillshare update agents -G demo # Update all agents in demo/`) } diff --git a/cmd/skillshare/update_agents.go b/cmd/skillshare/update_agents.go index 54219ef1..73fe06d9 100644 --- a/cmd/skillshare/update_agents.go +++ b/cmd/skillshare/update_agents.go @@ -12,6 +12,7 @@ import ( "skillshare/internal/config" "skillshare/internal/install" "skillshare/internal/oplog" + "skillshare/internal/resource" "skillshare/internal/ui" ) @@ -50,6 +51,18 @@ func cmdUpdateAgents(args []string, cfg *config.Config, start time.Time) error { } } + // Filter by group if specified + if len(opts.groups) > 0 { + var err error + results, err = filterAgentResultsByGroups(results, opts.groups, agentsDir) + if err != nil { + return err + } + if len(results) == 0 { + return fmt.Errorf("no agents found in group(s): %s", strings.Join(opts.groups, ", ")) + } + } + // Only check agents that have remote sources var tracked []check.AgentCheckResult for _, r := range results { @@ -178,6 +191,7 @@ func reinstallAgent(agentsDir string, r check.AgentCheckResult) error { // updateAgentArgs holds parsed arguments for agent update. type updateAgentArgs struct { names []string + groups []string all bool dryRun bool jsonOutput bool @@ -195,7 +209,11 @@ func parseUpdateAgentArgs(args []string) (*updateAgentArgs, bool, error) { case arg == "--json": opts.jsonOutput = true case arg == "--group" || arg == "-G": - return nil, false, fmt.Errorf("--group is not supported for agents") + i++ + if i >= len(args) { + return nil, false, fmt.Errorf("--group requires a value") + } + opts.groups = append(opts.groups, args[i]) case arg == "--help" || arg == "-h": return nil, true, nil case strings.HasPrefix(arg, "-"): @@ -205,11 +223,11 @@ func parseUpdateAgentArgs(args []string) (*updateAgentArgs, bool, error) { } } - if !opts.all && len(opts.names) == 0 { - return nil, false, fmt.Errorf("specify agent name(s) or --all") + if !opts.all && len(opts.names) == 0 && len(opts.groups) == 0 { + return nil, false, fmt.Errorf("specify agent name(s), --group, or --all") } - if opts.all && len(opts.names) > 0 { - return nil, false, fmt.Errorf("--all cannot be used with agent names") + if opts.all && (len(opts.names) > 0 || len(opts.groups) > 0) { + return nil, false, fmt.Errorf("--all cannot be used with agent names or --group") } return opts, false, nil @@ -219,6 +237,8 @@ func filterAgentCheckResults(results []check.AgentCheckResult, names []string) [ nameSet := make(map[string]bool, len(names)) for _, n := range names { nameSet[n] = true + // Also index without .md suffix so "demo/tutor.md" matches "demo/tutor" + nameSet[strings.TrimSuffix(n, ".md")] = true } var filtered []check.AgentCheckResult for _, r := range results { @@ -230,6 +250,60 @@ func filterAgentCheckResults(results []check.AgentCheckResult, names []string) [ return filtered } +// validateAgentGroups checks that each group name corresponds to a subdirectory +// under agentsDir. Returns normalized group names (trailing "/" stripped). +func validateAgentGroups(groups []string, agentsDir string) ([]string, error) { + normalized := make([]string, len(groups)) + for i, group := range groups { + group = strings.TrimSuffix(group, "/") + info, err := os.Stat(filepath.Join(agentsDir, group)) + if err != nil || !info.IsDir() { + return nil, fmt.Errorf("agent group %q not found in %s", group, agentsDir) + } + normalized[i] = group + } + return normalized, nil +} + +func matchesAnyGroup(name string, groups []string) bool { + for _, group := range groups { + if strings.HasPrefix(name, group+"/") { + return true + } + } + return false +} + +// filterAgentResultsByGroups filters agent check results to those in the given groups. +func filterAgentResultsByGroups(results []check.AgentCheckResult, groups []string, agentsDir string) ([]check.AgentCheckResult, error) { + groups, err := validateAgentGroups(groups, agentsDir) + if err != nil { + return nil, err + } + var filtered []check.AgentCheckResult + for _, r := range results { + if matchesAnyGroup(r.Name, groups) { + filtered = append(filtered, r) + } + } + return filtered, nil +} + +// filterDiscoveredAgentsByGroups filters discovered agents to those in the given groups. +func filterDiscoveredAgentsByGroups(discovered []resource.DiscoveredResource, groups []string, agentsDir string) ([]resource.DiscoveredResource, error) { + groups, err := validateAgentGroups(groups, agentsDir) + if err != nil { + return nil, err + } + var filtered []resource.DiscoveredResource + for _, d := range discovered { + if matchesAnyGroup(strings.TrimSuffix(d.RelPath, ".md"), groups) { + filtered = append(filtered, d) + } + } + return filtered, nil +} + func logUpdateAgentOp(cfgPath string, total, updated, failed int, dryRun bool, start time.Time) { status := "ok" if failed > 0 && updated > 0 { @@ -307,6 +381,17 @@ func cmdUpdateAgentsProject(args []string, projectRoot string, start time.Time) } } + if len(opts.groups) > 0 { + var err error + results, err = filterAgentResultsByGroups(results, opts.groups, agentsDir) + if err != nil { + return err + } + if len(results) == 0 { + return fmt.Errorf("no agents found in group(s): %s", strings.Join(opts.groups, ", ")) + } + } + var tracked []check.AgentCheckResult for _, r := range results { if r.Source != "" { diff --git a/internal/resource/agent.go b/internal/resource/agent.go index 994c773a..1a217d87 100644 --- a/internal/resource/agent.go +++ b/internal/resource/agent.go @@ -69,10 +69,8 @@ func (AgentKind) Discover(sourceDir string) ([]DiscoveredResource, error) { } relPath = strings.ReplaceAll(relPath, "\\", "/") - // Apply .agentignore matching - if ignoreMatcher.HasRules() && ignoreMatcher.Match(relPath, false) { - return nil - } + // Apply .agentignore matching — mark as disabled but still include + disabled := ignoreMatcher.HasRules() && ignoreMatcher.Match(relPath, false) name := agentNameFromFile(path, info.Name()) @@ -84,6 +82,7 @@ func (AgentKind) Discover(sourceDir string) ([]DiscoveredResource, error) { RelPath: relPath, AbsPath: path, IsNested: isNested, + Disabled: disabled, FlatName: AgentFlatName(relPath), SourcePath: filepath.Join(sourceDir, relPath), }) @@ -127,6 +126,17 @@ func AgentFlatName(relPath string) string { return filepath.Base(relPath) } +// ActiveAgents returns only non-disabled agents from the given slice. +func ActiveAgents(agents []DiscoveredResource) []DiscoveredResource { + active := make([]DiscoveredResource, 0, len(agents)) + for _, a := range agents { + if !a.Disabled { + active = append(active, a) + } + } + return active +} + // CreateLink creates a file symlink from dst pointing to src. func (AgentKind) CreateLink(src, dst string) error { return os.Symlink(src, dst) diff --git a/internal/resource/kind_test.go b/internal/resource/kind_test.go index b46f4121..961126d5 100644 --- a/internal/resource/kind_test.go +++ b/internal/resource/kind_test.go @@ -313,11 +313,31 @@ func TestAgentKind_Discover_RespectsAgentignore(t *testing.T) { t.Fatalf("Discover error: %v", err) } - if len(resources) != 1 { - t.Fatalf("expected 1 resource (ignored filtered out), got %d", len(resources)) + if len(resources) != 2 { + t.Fatalf("expected 2 resources (ignored included as disabled), got %d", len(resources)) + } + + // Find each by name and check Disabled flag + var active, ignored *DiscoveredResource + for i := range resources { + switch resources[i].Name { + case "active": + active = &resources[i] + case "ignored": + ignored = &resources[i] + } + } + if active == nil { + t.Fatal("active agent not found") + } + if active.Disabled { + t.Error("active agent should not be disabled") + } + if ignored == nil { + t.Fatal("ignored agent not found") } - if resources[0].Name != "active" { - t.Errorf("Name = %q, want %q", resources[0].Name, "active") + if !ignored.Disabled { + t.Error("ignored agent should be disabled") } } diff --git a/tests/integration/agent_crud_test.go b/tests/integration/agent_crud_test.go index 2b4dceb1..03dd1183 100644 --- a/tests/integration/agent_crud_test.go +++ b/tests/integration/agent_crud_test.go @@ -40,7 +40,7 @@ func TestUpdate_Agents_LocalOnly(t *testing.T) { result.AssertAnyOutputContains(t, "local") } -func TestUpdate_Agents_GroupNotSupported(t *testing.T) { +func TestUpdate_Agents_GroupInvalidDir(t *testing.T) { sb := testutil.NewSandbox(t) defer sb.Cleanup() @@ -49,9 +49,9 @@ func TestUpdate_Agents_GroupNotSupported(t *testing.T) { }) sb.WriteConfig(`source: ` + sb.SourcePath + "\ntargets: {}\n") - result := sb.RunCLI("update", "agents", "--group", "mygroup") + result := sb.RunCLI("update", "agents", "--group", "nonexistent") result.AssertFailure(t) - result.AssertAnyOutputContains(t, "not supported for agents") + result.AssertAnyOutputContains(t, "not found") } func TestUpdate_Agents_RequiresNameOrAll(t *testing.T) { From dad90d79d5a59630bb570b872abf6c5c3d63fd9c Mon Sep 17 00:00:00 2001 From: Willie Date: Wed, 8 Apr 2026 03:15:01 +0800 Subject: [PATCH 073/205] feat: add MetadataStore core types and CRUD operations Introduces MetadataEntry (merges SkillMeta + RegistryEntry) and MetadataStore (map-based container with Set/Get/Remove/Has/List). This is the foundation for centralized .metadata.json files. --- internal/install/metadata.go | 91 +++++++++++++ internal/install/metadata_test.go | 208 ++++++++++++++++++++++++++++++ 2 files changed, 299 insertions(+) create mode 100644 internal/install/metadata.go create mode 100644 internal/install/metadata_test.go diff --git a/internal/install/metadata.go b/internal/install/metadata.go new file mode 100644 index 00000000..32c8f07c --- /dev/null +++ b/internal/install/metadata.go @@ -0,0 +1,91 @@ +package install + +import ( + "sort" + "time" +) + +// MetadataFileName is the centralized metadata file stored in each directory. +const MetadataFileName = ".metadata.json" + +// MetadataStore holds all entries for a single directory (skills/ or agents/). +type MetadataStore struct { + Version int `json:"version"` + Entries map[string]*MetadataEntry `json:"entries"` +} + +// MetadataEntry merges the old SkillMeta + RegistryEntry fields. +type MetadataEntry struct { + // Registry fields + Source string `json:"source"` + Kind string `json:"kind,omitempty"` + Type string `json:"type,omitempty"` + Tracked bool `json:"tracked,omitempty"` + Group string `json:"group,omitempty"` + Branch string `json:"branch,omitempty"` + Into string `json:"into,omitempty"` + Name string `json:"-"` // runtime only, not persisted (map key is the name) + + // Meta fields + InstalledAt time.Time `json:"installed_at,omitzero"` + RepoURL string `json:"repo_url,omitempty"` + Subdir string `json:"subdir,omitempty"` + Version string `json:"version,omitempty"` + TreeHash string `json:"tree_hash,omitempty"` + FileHashes map[string]string `json:"file_hashes,omitempty"` +} + +// NewMetadataStore returns an empty store with version 1. +func NewMetadataStore() *MetadataStore { + return &MetadataStore{ + Version: 1, + Entries: make(map[string]*MetadataEntry), + } +} + +// Get returns the entry for the given name, or nil if not found. +func (s *MetadataStore) Get(name string) *MetadataEntry { + return s.Entries[name] +} + +// Set adds or replaces an entry. +func (s *MetadataStore) Set(name string, entry *MetadataEntry) { + s.Entries[name] = entry +} + +// Remove deletes an entry by name. +func (s *MetadataStore) Remove(name string) { + delete(s.Entries, name) +} + +// Has returns true if an entry exists for the given name. +func (s *MetadataStore) Has(name string) bool { + _, ok := s.Entries[name] + return ok +} + +// List returns sorted entry names. +func (s *MetadataStore) List() []string { + names := make([]string, 0, len(s.Entries)) + for name := range s.Entries { + names = append(names, name) + } + sort.Strings(names) + return names +} + +// EffectiveKind returns "skill" if Kind is empty. +func (e *MetadataEntry) EffectiveKind() string { + if e.Kind == "" { + return "skill" + } + return e.Kind +} + +// FullName returns "group/name" if Group is set, otherwise Name. +func (e *MetadataEntry) FullName() string { + if e.Group != "" { + return e.Group + "/" + e.Name + } + return e.Name +} diff --git a/internal/install/metadata_test.go b/internal/install/metadata_test.go new file mode 100644 index 00000000..47a4ae47 --- /dev/null +++ b/internal/install/metadata_test.go @@ -0,0 +1,208 @@ +package install + +import ( + "testing" + "time" +) + +func TestMetadataStore_SetAndGet(t *testing.T) { + s := NewMetadataStore() + now := time.Now() + entry := &MetadataEntry{ + Source: "org/repo", + Kind: "skill", + Type: "github", + Tracked: true, + Group: "mygroup", + Branch: "main", + Into: "frontend", + InstalledAt: now, + RepoURL: "https://github.com/org/repo.git", + Subdir: "skills/foo", + Version: "abc123", + TreeHash: "deadbeef", + FileHashes: map[string]string{"SKILL.md": "sha256:aabbcc"}, + } + + s.Set("foo", entry) + got := s.Get("foo") + + if got == nil { + t.Fatal("Get returned nil after Set") + } + if got.Source != entry.Source { + t.Errorf("Source = %q, want %q", got.Source, entry.Source) + } + if got.Kind != entry.Kind { + t.Errorf("Kind = %q, want %q", got.Kind, entry.Kind) + } + if got.Type != entry.Type { + t.Errorf("Type = %q, want %q", got.Type, entry.Type) + } + if got.Tracked != entry.Tracked { + t.Errorf("Tracked = %v, want %v", got.Tracked, entry.Tracked) + } + if got.Group != entry.Group { + t.Errorf("Group = %q, want %q", got.Group, entry.Group) + } + if got.Branch != entry.Branch { + t.Errorf("Branch = %q, want %q", got.Branch, entry.Branch) + } + if got.Into != entry.Into { + t.Errorf("Into = %q, want %q", got.Into, entry.Into) + } + if !got.InstalledAt.Equal(entry.InstalledAt) { + t.Errorf("InstalledAt = %v, want %v", got.InstalledAt, entry.InstalledAt) + } + if got.RepoURL != entry.RepoURL { + t.Errorf("RepoURL = %q, want %q", got.RepoURL, entry.RepoURL) + } + if got.Subdir != entry.Subdir { + t.Errorf("Subdir = %q, want %q", got.Subdir, entry.Subdir) + } + if got.Version != entry.Version { + t.Errorf("Version = %q, want %q", got.Version, entry.Version) + } + if got.TreeHash != entry.TreeHash { + t.Errorf("TreeHash = %q, want %q", got.TreeHash, entry.TreeHash) + } + if len(got.FileHashes) != 1 || got.FileHashes["SKILL.md"] != "sha256:aabbcc" { + t.Errorf("FileHashes = %v, want map with one entry", got.FileHashes) + } +} + +func TestMetadataStore_GetMissing(t *testing.T) { + s := NewMetadataStore() + got := s.Get("nonexistent") + if got != nil { + t.Errorf("Get nonexistent = %v, want nil", got) + } +} + +func TestMetadataStore_Has(t *testing.T) { + s := NewMetadataStore() + s.Set("present", &MetadataEntry{Source: "org/repo"}) + + if !s.Has("present") { + t.Error("Has(present) = false, want true") + } + if s.Has("absent") { + t.Error("Has(absent) = true, want false") + } +} + +func TestMetadataStore_Remove(t *testing.T) { + s := NewMetadataStore() + s.Set("to-remove", &MetadataEntry{Source: "org/repo"}) + + if !s.Has("to-remove") { + t.Fatal("entry should exist before Remove") + } + + s.Remove("to-remove") + + if s.Has("to-remove") { + t.Error("entry still present after Remove") + } + if s.Get("to-remove") != nil { + t.Error("Get after Remove should return nil") + } +} + +func TestMetadataStore_Remove_Nonexistent(t *testing.T) { + s := NewMetadataStore() + // Should not panic + s.Remove("nonexistent") +} + +func TestMetadataStore_List(t *testing.T) { + s := NewMetadataStore() + s.Set("zebra", &MetadataEntry{}) + s.Set("alpha", &MetadataEntry{}) + s.Set("mango", &MetadataEntry{}) + + names := s.List() + + if len(names) != 3 { + t.Fatalf("List() = %v, want 3 entries", names) + } + want := []string{"alpha", "mango", "zebra"} + for i, w := range want { + if names[i] != w { + t.Errorf("List()[%d] = %q, want %q", i, names[i], w) + } + } +} + +func TestMetadataStore_List_Empty(t *testing.T) { + s := NewMetadataStore() + names := s.List() + if len(names) != 0 { + t.Errorf("List() on empty store = %v, want []", names) + } +} + +func TestMetadataEntry_EffectiveKind(t *testing.T) { + tests := []struct { + name string + kind string + want string + }{ + {"empty kind defaults to skill", "", "skill"}, + {"explicit skill", "skill", "skill"}, + {"agent", "agent", "agent"}, + {"custom kind preserved", "custom", "custom"}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + e := &MetadataEntry{Kind: tt.kind} + got := e.EffectiveKind() + if got != tt.want { + t.Errorf("EffectiveKind() = %q, want %q", got, tt.want) + } + }) + } +} + +func TestMetadataEntry_FullName(t *testing.T) { + tests := []struct { + name string + group string + entry string + want string + }{ + {"no group", "", "my-skill", "my-skill"}, + {"with group", "frontend", "my-skill", "frontend/my-skill"}, + {"nested group", "team/frontend", "my-skill", "team/frontend/my-skill"}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + e := &MetadataEntry{ + Name: tt.entry, + Group: tt.group, + } + got := e.FullName() + if got != tt.want { + t.Errorf("FullName() = %q, want %q", got, tt.want) + } + }) + } +} + +func TestNewMetadataStore_InitialState(t *testing.T) { + s := NewMetadataStore() + if s == nil { + t.Fatal("NewMetadataStore returned nil") + } + if s.Version != 1 { + t.Errorf("Version = %d, want 1", s.Version) + } + if s.Entries == nil { + t.Error("Entries map is nil") + } + if len(s.Entries) != 0 { + t.Errorf("Entries not empty on new store: %v", s.Entries) + } +} From e9eee35cf296f83084f7996f819b5749532b9973 Mon Sep 17 00:00:00 2001 From: Willie Date: Wed, 8 Apr 2026 03:17:21 +0800 Subject: [PATCH 074/205] feat: add MetadataStore Load/Save with atomic writes LoadMetadata returns empty store on missing file. Save uses temp-file + os.Rename for crash safety. --- internal/install/metadata.go | 66 +++++++++++++++++++ internal/install/metadata_test.go | 101 ++++++++++++++++++++++++++++++ 2 files changed, 167 insertions(+) diff --git a/internal/install/metadata.go b/internal/install/metadata.go index 32c8f07c..f3f267f6 100644 --- a/internal/install/metadata.go +++ b/internal/install/metadata.go @@ -1,6 +1,10 @@ package install import ( + "encoding/json" + "fmt" + "os" + "path/filepath" "sort" "time" ) @@ -89,3 +93,65 @@ func (e *MetadataEntry) FullName() string { } return e.Name } + +// LoadMetadata reads .metadata.json from the given directory. +// Returns an empty store (version 1) if the file does not exist. +func LoadMetadata(dir string) (*MetadataStore, error) { + path := filepath.Join(dir, MetadataFileName) + data, err := os.ReadFile(path) + if err != nil { + if os.IsNotExist(err) { + return NewMetadataStore(), nil + } + return nil, fmt.Errorf("failed to read metadata: %w", err) + } + + var store MetadataStore + if err := json.Unmarshal(data, &store); err != nil { + return nil, fmt.Errorf("failed to parse metadata: %w", err) + } + if store.Entries == nil { + store.Entries = make(map[string]*MetadataEntry) + } + return &store, nil +} + +// Save writes .metadata.json atomically (temp file → rename). +func (s *MetadataStore) Save(dir string) error { + if err := os.MkdirAll(dir, 0755); err != nil { + return fmt.Errorf("failed to create directory: %w", err) + } + + data, err := json.MarshalIndent(s, "", " ") + if err != nil { + return fmt.Errorf("failed to marshal metadata: %w", err) + } + data = append(data, '\n') + + target := filepath.Join(dir, MetadataFileName) + tmp, err := os.CreateTemp(dir, ".metadata-*.tmp") + if err != nil { + return fmt.Errorf("failed to create temp file: %w", err) + } + tmpName := tmp.Name() + + if _, err := tmp.Write(data); err != nil { + tmp.Close() + os.Remove(tmpName) + return fmt.Errorf("failed to write temp file: %w", err) + } + if err := tmp.Close(); err != nil { + os.Remove(tmpName) + return fmt.Errorf("failed to close temp file: %w", err) + } + if err := os.Rename(tmpName, target); err != nil { + os.Remove(tmpName) + return fmt.Errorf("failed to rename temp file: %w", err) + } + return nil +} + +// MetadataPath returns the .metadata.json path for the given directory. +func MetadataPath(dir string) string { + return filepath.Join(dir, MetadataFileName) +} diff --git a/internal/install/metadata_test.go b/internal/install/metadata_test.go index 47a4ae47..8575dfa4 100644 --- a/internal/install/metadata_test.go +++ b/internal/install/metadata_test.go @@ -1,6 +1,8 @@ package install import ( + "os" + "path/filepath" "testing" "time" ) @@ -206,3 +208,102 @@ func TestNewMetadataStore_InitialState(t *testing.T) { t.Errorf("Entries not empty on new store: %v", s.Entries) } } + +func TestMetadataStore_SaveAndLoad(t *testing.T) { + dir := t.TempDir() + store := NewMetadataStore() + store.Set("my-skill", &MetadataEntry{ + Source: "github.com/user/repo", + Type: "github", + InstalledAt: time.Date(2026, 4, 1, 10, 0, 0, 0, time.UTC), + FileHashes: map[string]string{"SKILL.md": "sha256:abc123"}, + }) + + if err := store.Save(dir); err != nil { + t.Fatalf("Save failed: %v", err) + } + + // Verify file exists + metaPath := filepath.Join(dir, MetadataFileName) + if _, err := os.Stat(metaPath); err != nil { + t.Fatalf("metadata file not created: %v", err) + } + + // Load and verify round-trip + loaded, err := LoadMetadata(dir) + if err != nil { + t.Fatalf("LoadMetadata failed: %v", err) + } + if loaded.Version != 1 { + t.Errorf("version = %d, want 1", loaded.Version) + } + entry := loaded.Get("my-skill") + if entry == nil { + t.Fatal("expected entry, got nil") + } + if entry.Source != "github.com/user/repo" { + t.Errorf("source = %q, want %q", entry.Source, "github.com/user/repo") + } + if entry.FileHashes["SKILL.md"] != "sha256:abc123" { + t.Errorf("file hash mismatch") + } +} + +func TestLoadMetadata_EmptyDir(t *testing.T) { + dir := t.TempDir() + store, err := LoadMetadata(dir) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if store.Version != 1 { + t.Errorf("version = %d, want 1", store.Version) + } + if len(store.Entries) != 0 { + t.Errorf("expected empty entries, got %d", len(store.Entries)) + } +} + +func TestLoadMetadata_InvalidJSON(t *testing.T) { + dir := t.TempDir() + os.WriteFile(filepath.Join(dir, MetadataFileName), []byte("{invalid"), 0644) + _, err := LoadMetadata(dir) + if err == nil { + t.Fatal("expected error for invalid JSON") + } +} + +func TestMetadataStore_SaveAtomic_NoTempFiles(t *testing.T) { + dir := t.TempDir() + store := NewMetadataStore() + store.Set("a", &MetadataEntry{Source: "s1"}) + if err := store.Save(dir); err != nil { + t.Fatalf("Save failed: %v", err) + } + + entries, _ := os.ReadDir(dir) + for _, e := range entries { + if e.Name() != MetadataFileName { + t.Errorf("unexpected file left behind: %s", e.Name()) + } + } +} + +func TestMetadataStore_SaveCreatesDir(t *testing.T) { + dir := filepath.Join(t.TempDir(), "nested", "dir") + store := NewMetadataStore() + store.Set("x", &MetadataEntry{Source: "s"}) + if err := store.Save(dir); err != nil { + t.Fatalf("Save failed: %v", err) + } + if _, err := os.Stat(filepath.Join(dir, MetadataFileName)); err != nil { + t.Fatalf("file should exist in nested dir: %v", err) + } +} + +func TestMetadataPath(t *testing.T) { + got := MetadataPath("/some/dir") + want := filepath.Join("/some/dir", ".metadata.json") + if got != want { + t.Errorf("MetadataPath = %q, want %q", got, want) + } +} From fa5679d0c0e4f03f567975c8ae6c85b152bcec83 Mon Sep 17 00:00:00 2001 From: Willie Date: Wed, 8 Apr 2026 03:36:03 +0800 Subject: [PATCH 075/205] feat: add MetadataStore helpers (SetFromSource, ComputeEntryHashes, RefreshHashes) ComputeFileHashes now also skips .metadata.json when computing hashes. --- internal/install/meta.go | 3 ++ internal/install/metadata.go | 44 ++++++++++++++++ internal/install/metadata_test.go | 85 +++++++++++++++++++++++++++++++ 3 files changed, 132 insertions(+) diff --git a/internal/install/meta.go b/internal/install/meta.go index d577d4d0..02c4da60 100644 --- a/internal/install/meta.go +++ b/internal/install/meta.go @@ -97,6 +97,9 @@ func ComputeFileHashes(skillPath string) (map[string]string, error) { if info.Name() == MetaFileName { return nil } + if info.Name() == MetadataFileName { + return nil + } rel, relErr := filepath.Rel(skillPath, path) if relErr != nil { diff --git a/internal/install/metadata.go b/internal/install/metadata.go index f3f267f6..beaa6cdb 100644 --- a/internal/install/metadata.go +++ b/internal/install/metadata.go @@ -6,6 +6,7 @@ import ( "os" "path/filepath" "sort" + "strings" "time" ) @@ -155,3 +156,46 @@ func (s *MetadataStore) Save(dir string) error { func MetadataPath(dir string) string { return filepath.Join(dir, MetadataFileName) } + +// SetFromSource creates an entry from a Source and stores it. Returns the entry. +func (s *MetadataStore) SetFromSource(name string, src *Source) *MetadataEntry { + entry := &MetadataEntry{ + Source: src.Raw, + Type: src.MetaType(), + InstalledAt: time.Now(), + Branch: src.Branch, + } + if src.IsGit() { + entry.RepoURL = src.CloneURL + } + if src.HasSubdir() { + entry.Subdir = strings.ReplaceAll(src.Subdir, "\\", "/") + } + s.Entries[name] = entry + return entry +} + +// ComputeEntryHashes walks skillPath and populates FileHashes with sha256 digests. +// Delegates to ComputeFileHashes in meta.go. +func (e *MetadataEntry) ComputeEntryHashes(skillPath string) error { + hashes, err := ComputeFileHashes(skillPath) + if err != nil { + return err + } + e.FileHashes = hashes + return nil +} + +// RefreshHashes recomputes file hashes for an entry that already has them. +// No-op if entry doesn't exist or has no FileHashes. +func (s *MetadataStore) RefreshHashes(name, skillPath string) { + entry := s.Get(name) + if entry == nil || entry.FileHashes == nil { + return + } + hashes, err := ComputeFileHashes(skillPath) + if err != nil { + return + } + entry.FileHashes = hashes +} diff --git a/internal/install/metadata_test.go b/internal/install/metadata_test.go index 8575dfa4..3b88a246 100644 --- a/internal/install/metadata_test.go +++ b/internal/install/metadata_test.go @@ -307,3 +307,88 @@ func TestMetadataPath(t *testing.T) { t.Errorf("MetadataPath = %q, want %q", got, want) } } + +func TestMetadataStore_SetFromSource(t *testing.T) { + store := NewMetadataStore() + source := &Source{ + Raw: "github.com/user/repo", + CloneURL: "https://github.com/user/repo.git", + Branch: "dev", + Subdir: "skills\\review", + } + source.Type = SourceTypeGitHub + + entry := store.SetFromSource("review", source) + if entry.Source != "github.com/user/repo" { + t.Errorf("source = %q", entry.Source) + } + if entry.RepoURL != "https://github.com/user/repo.git" { + t.Errorf("repo_url = %q", entry.RepoURL) + } + if entry.Branch != "dev" { + t.Errorf("branch = %q", entry.Branch) + } + if entry.Subdir != "skills/review" { + t.Errorf("subdir = %q, want forward slashes", entry.Subdir) + } + if entry.InstalledAt.IsZero() { + t.Error("installed_at should be set") + } + if !store.Has("review") { + t.Error("entry not stored") + } +} + +func TestMetadataEntry_ComputeEntryHashes(t *testing.T) { + dir := t.TempDir() + os.WriteFile(filepath.Join(dir, "SKILL.md"), []byte("# Test"), 0644) + os.MkdirAll(filepath.Join(dir, ".git"), 0755) + + entry := &MetadataEntry{} + if err := entry.ComputeEntryHashes(dir); err != nil { + t.Fatalf("ComputeEntryHashes failed: %v", err) + } + if _, ok := entry.FileHashes["SKILL.md"]; !ok { + t.Error("expected SKILL.md in file hashes") + } + if len(entry.FileHashes) != 1 { + t.Errorf("expected 1 hash (SKILL.md only), got %d: %v", len(entry.FileHashes), entry.FileHashes) + } +} + +func TestMetadataStore_RefreshHashes(t *testing.T) { + dir := t.TempDir() + skillDir := filepath.Join(dir, "my-skill") + os.MkdirAll(skillDir, 0755) + os.WriteFile(filepath.Join(skillDir, "SKILL.md"), []byte("# V1"), 0644) + + store := NewMetadataStore() + entry := &MetadataEntry{ + Source: "test", + FileHashes: map[string]string{"SKILL.md": "sha256:old"}, + } + store.Set("my-skill", entry) + + store.RefreshHashes("my-skill", skillDir) + + refreshed := store.Get("my-skill") + if refreshed.FileHashes["SKILL.md"] == "sha256:old" { + t.Error("hashes should have been refreshed") + } + if refreshed.FileHashes["SKILL.md"] == "" { + t.Error("hash should not be empty after refresh") + } +} + +func TestMetadataStore_RefreshHashes_NoOp(t *testing.T) { + store := NewMetadataStore() + // No entry — should not panic + store.RefreshHashes("nonexistent", "/tmp") + + // Entry without FileHashes — should not compute + store.Set("x", &MetadataEntry{Source: "s"}) + store.RefreshHashes("x", "/tmp") + if store.Get("x").FileHashes != nil { + t.Error("should not compute hashes when FileHashes is nil") + } +} From ce6be175d4986ef73faef331c2c945be1b26c671 Mon Sep 17 00:00:00 2001 From: Willie Date: Wed, 8 Apr 2026 10:39:43 +0800 Subject: [PATCH 076/205] feat: add migration from old sidecar + registry to .metadata.json MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit LoadMetadataWithMigration auto-detects old format on first load: reads registry.yaml entries, merges sidecar .skillshare-meta.json fields, writes unified .metadata.json, cleans up old files. Idempotent — skips migration if .metadata.json already exists. --- internal/install/metadata_migrate.go | 280 +++++++++++++++++++++ internal/install/metadata_migrate_test.go | 288 ++++++++++++++++++++++ 2 files changed, 568 insertions(+) create mode 100644 internal/install/metadata_migrate.go create mode 100644 internal/install/metadata_migrate_test.go diff --git a/internal/install/metadata_migrate.go b/internal/install/metadata_migrate.go new file mode 100644 index 00000000..575847f9 --- /dev/null +++ b/internal/install/metadata_migrate.go @@ -0,0 +1,280 @@ +package install + +import ( + "encoding/json" + "os" + "path/filepath" + "strings" + + "gopkg.in/yaml.v3" +) + +// LoadMetadataWithMigration loads .metadata.json, or migrates from old format if needed. +// kind is "" for skills directories, "agent" for agents directories. +func LoadMetadataWithMigration(dir, kind string) (*MetadataStore, error) { + // Fast path: .metadata.json already exists + metaPath := filepath.Join(dir, MetadataFileName) + if _, err := os.Stat(metaPath); err == nil { + return LoadMetadata(dir) + } + + store := NewMetadataStore() + + // Phase 1: Migrate registry.yaml entries + migrateRegistryEntries(store, dir, kind) + + // Phase 2: Migrate sidecar .skillshare-meta.json files + if kind == "agent" { + migrateAgentSidecars(store, dir) + } else { + migrateSkillSidecars(store, dir) + } + + // Phase 3: Save if we found anything to migrate + if len(store.Entries) > 0 { + if err := store.Save(dir); err != nil { + return store, err + } + } + + // Phase 4: Clean up old registry.yaml + cleanupOldRegistry(dir) + + return store, nil +} + +// localRegistryEntry mirrors config.SkillEntry without importing internal/config. +type localRegistryEntry struct { + Name string `yaml:"name"` + Kind string `yaml:"kind,omitempty"` + Source string `yaml:"source"` + Tracked bool `yaml:"tracked,omitempty"` + Group string `yaml:"group,omitempty"` + Branch string `yaml:"branch,omitempty"` +} + +// localRegistry mirrors config.Registry without importing internal/config. +type localRegistry struct { + Skills []localRegistryEntry `yaml:"skills,omitempty"` +} + +// migrateRegistryEntries reads registry.yaml in dir and merges matching entries into store. +// For skills dirs (kind=""), agent entries are skipped. +// For agents dirs (kind="agent"), skill entries are skipped. +func migrateRegistryEntries(store *MetadataStore, dir, kind string) { + registryPath := filepath.Join(dir, "registry.yaml") + data, err := os.ReadFile(registryPath) + if err != nil { + return + } + + var reg localRegistry + if err := yaml.Unmarshal(data, ®); err != nil { + return + } + + for _, e := range reg.Skills { + if e.Name == "" || e.Source == "" { + continue + } + + isAgent := e.Kind == "agent" + + // Filter: skills dir skips agent entries, agents dir skips skill entries + if kind == "agent" && !isAgent { + continue + } + if kind == "" && isAgent { + continue + } + + entry := store.Get(e.Name) + if entry == nil { + entry = &MetadataEntry{} + store.Set(e.Name, entry) + } + + entry.Source = e.Source + entry.Kind = e.Kind + entry.Tracked = e.Tracked + entry.Group = e.Group + entry.Branch = e.Branch + } +} + +// migrateSkillSidecars walks subdirectories of dir, looks for .skillshare-meta.json +// inside each, reads as SkillMeta, merges fields into store entry, and removes old sidecar. +func migrateSkillSidecars(store *MetadataStore, dir string) { + entries, err := os.ReadDir(dir) + if err != nil { + return + } + + for _, de := range entries { + if !de.IsDir() { + continue + } + skillName := de.Name() + skillPath := filepath.Join(dir, skillName) + walkSkillDir(store, skillPath, skillName, "") + } +} + +// walkSkillDir recursively walks a skill directory to find .skillshare-meta.json sidecars. +// group is the parent group prefix (empty for top-level skills). +func walkSkillDir(store *MetadataStore, skillPath, name, group string) { + sidecarPath := filepath.Join(skillPath, MetaFileName) + if _, err := os.Stat(sidecarPath); err == nil { + // This directory has a sidecar — it's a leaf skill + mergeSkillSidecar(store, name, group, sidecarPath) + os.Remove(sidecarPath) + return + } + + // Check if this has subdirectories (nested skills) + subEntries, err := os.ReadDir(skillPath) + if err != nil { + return + } + + for _, sub := range subEntries { + if sub.IsDir() { + subGroup := name + if group != "" { + subGroup = group + "/" + name + } + walkSkillDir(store, filepath.Join(skillPath, sub.Name()), sub.Name(), subGroup) + } + } +} + +// mergeSkillSidecar reads a SkillMeta sidecar and merges its fields into the store. +func mergeSkillSidecar(store *MetadataStore, name, group, sidecarPath string) { + data, err := os.ReadFile(sidecarPath) + if err != nil { + return + } + + var meta SkillMeta + if err := json.Unmarshal(data, &meta); err != nil { + return + } + + entry := store.Get(name) + if entry == nil { + entry = &MetadataEntry{} + store.Set(name, entry) + } + + // Merge sidecar fields — sidecar has richer data + if meta.Source != "" && entry.Source == "" { + entry.Source = meta.Source + } + if meta.Kind != "" { + entry.Kind = meta.Kind + } + if meta.Type != "" { + entry.Type = meta.Type + } + if !meta.InstalledAt.IsZero() { + entry.InstalledAt = meta.InstalledAt + } + if meta.RepoURL != "" { + entry.RepoURL = meta.RepoURL + } + if meta.Subdir != "" { + entry.Subdir = meta.Subdir + } + if meta.Version != "" { + entry.Version = meta.Version + } + if meta.TreeHash != "" { + entry.TreeHash = meta.TreeHash + } + if meta.FileHashes != nil { + entry.FileHashes = meta.FileHashes + } + if meta.Branch != "" && entry.Branch == "" { + entry.Branch = meta.Branch + } + if group != "" && entry.Group == "" { + entry.Group = group + } +} + +// migrateAgentSidecars scans dir for *.skillshare-meta.json files, extracts agent name, +// reads as SkillMeta, merges into store with Kind="agent", removes old sidecar. +func migrateAgentSidecars(store *MetadataStore, dir string) { + entries, err := os.ReadDir(dir) + if err != nil { + return + } + + const suffix = ".skillshare-meta.json" + for _, de := range entries { + if de.IsDir() { + continue + } + if !strings.HasSuffix(de.Name(), suffix) { + continue + } + + agentName := strings.TrimSuffix(de.Name(), suffix) + if agentName == "" { + continue + } + + sidecarPath := filepath.Join(dir, de.Name()) + data, err := os.ReadFile(sidecarPath) + if err != nil { + continue + } + + var meta SkillMeta + if err := json.Unmarshal(data, &meta); err != nil { + continue + } + + entry := store.Get(agentName) + if entry == nil { + entry = &MetadataEntry{} + store.Set(agentName, entry) + } + + if meta.Source != "" && entry.Source == "" { + entry.Source = meta.Source + } + entry.Kind = "agent" + if meta.Type != "" { + entry.Type = meta.Type + } + if !meta.InstalledAt.IsZero() { + entry.InstalledAt = meta.InstalledAt + } + if meta.RepoURL != "" { + entry.RepoURL = meta.RepoURL + } + if meta.Subdir != "" { + entry.Subdir = meta.Subdir + } + if meta.Version != "" { + entry.Version = meta.Version + } + if meta.TreeHash != "" { + entry.TreeHash = meta.TreeHash + } + if meta.FileHashes != nil { + entry.FileHashes = meta.FileHashes + } + if meta.Branch != "" && entry.Branch == "" { + entry.Branch = meta.Branch + } + + os.Remove(sidecarPath) + } +} + +// cleanupOldRegistry removes registry.yaml from dir (best-effort, ignores errors). +func cleanupOldRegistry(dir string) { + os.Remove(filepath.Join(dir, "registry.yaml")) +} diff --git a/internal/install/metadata_migrate_test.go b/internal/install/metadata_migrate_test.go new file mode 100644 index 00000000..0be70f23 --- /dev/null +++ b/internal/install/metadata_migrate_test.go @@ -0,0 +1,288 @@ +package install + +import ( + "encoding/json" + "os" + "path/filepath" + "testing" + "time" +) + +// TestMigrateMetadata_FromSidecars verifies that a skill dir with a .skillshare-meta.json +// sidecar is migrated: entry appears in store, old sidecar removed, .metadata.json created. +func TestMigrateMetadata_FromSidecars(t *testing.T) { + dir := t.TempDir() + + // Create skill dir with SKILL.md and sidecar + skillDir := filepath.Join(dir, "my-skill") + if err := os.MkdirAll(skillDir, 0755); err != nil { + t.Fatal(err) + } + os.WriteFile(filepath.Join(skillDir, "SKILL.md"), []byte("---\nname: my-skill\n---\n# Content"), 0644) + + meta := &SkillMeta{ + Source: "github.com/user/repo", + Type: "github", + RepoURL: "https://github.com/user/repo", + InstalledAt: time.Now(), + FileHashes: map[string]string{"SKILL.md": "sha256:abc123"}, + } + writeSkillMetaSidecar(t, skillDir, meta) + + store, err := LoadMetadataWithMigration(dir, "") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + // Entry should be present + entry := store.Get("my-skill") + if entry == nil { + t.Fatal("expected entry 'my-skill' in store") + } + if entry.Source != "github.com/user/repo" { + t.Errorf("Source = %q, want %q", entry.Source, "github.com/user/repo") + } + if entry.RepoURL != "https://github.com/user/repo" { + t.Errorf("RepoURL = %q, want %q", entry.RepoURL, "https://github.com/user/repo") + } + if len(entry.FileHashes) == 0 { + t.Error("expected FileHashes to be populated") + } + + // Old sidecar should be removed + sidecarPath := filepath.Join(skillDir, MetaFileName) + if _, err := os.Stat(sidecarPath); err == nil { + t.Error("expected old sidecar to be removed") + } + + // .metadata.json should exist + if _, err := os.Stat(filepath.Join(dir, MetadataFileName)); err != nil { + t.Errorf(".metadata.json not created: %v", err) + } +} + +// TestMigrateMetadata_FromRegistry verifies that registry.yaml entries are migrated +// and the old registry.yaml is removed. +func TestMigrateMetadata_FromRegistry(t *testing.T) { + dir := t.TempDir() + + // Create skill dir (no sidecar) + skillDir := filepath.Join(dir, "team-skill") + if err := os.MkdirAll(skillDir, 0755); err != nil { + t.Fatal(err) + } + os.WriteFile(filepath.Join(skillDir, "SKILL.md"), []byte("---\nname: team-skill\n---\n"), 0644) + + // Write registry.yaml + registryYAML := `skills: + - name: team-skill + source: github.com/org/repo + tracked: true + branch: main +` + os.WriteFile(filepath.Join(dir, "registry.yaml"), []byte(registryYAML), 0644) + + store, err := LoadMetadataWithMigration(dir, "") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + entry := store.Get("team-skill") + if entry == nil { + t.Fatal("expected entry 'team-skill' in store") + } + if entry.Source != "github.com/org/repo" { + t.Errorf("Source = %q, want %q", entry.Source, "github.com/org/repo") + } + if !entry.Tracked { + t.Error("expected Tracked = true") + } + if entry.Branch != "main" { + t.Errorf("Branch = %q, want %q", entry.Branch, "main") + } + + // Old registry.yaml should be removed + if _, err := os.Stat(filepath.Join(dir, "registry.yaml")); err == nil { + t.Error("expected old registry.yaml to be removed") + } +} + +// TestMigrateMetadata_MergesRegistryAndSidecar verifies that registry fields (group, branch) +// and sidecar fields (repo_url, file_hashes) are merged into a single entry. +func TestMigrateMetadata_MergesRegistryAndSidecar(t *testing.T) { + dir := t.TempDir() + + // Registry has group + branch + registryYAML := `skills: + - name: review + source: github.com/org/tools + group: frontend + branch: develop +` + os.WriteFile(filepath.Join(dir, "registry.yaml"), []byte(registryYAML), 0644) + + // Sidecar has repo_url + file_hashes (inside group/name subdir) + skillDir := filepath.Join(dir, "review") + if err := os.MkdirAll(skillDir, 0755); err != nil { + t.Fatal(err) + } + meta := &SkillMeta{ + Source: "github.com/org/tools", + Type: "github", + RepoURL: "https://github.com/org/tools", + FileHashes: map[string]string{"SKILL.md": "sha256:def456"}, + } + writeSkillMetaSidecar(t, skillDir, meta) + + store, err := LoadMetadataWithMigration(dir, "") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + entry := store.Get("review") + if entry == nil { + t.Fatal("expected entry 'review' in store") + } + // From registry + if entry.Group != "frontend" { + t.Errorf("Group = %q, want %q", entry.Group, "frontend") + } + if entry.Branch != "develop" { + t.Errorf("Branch = %q, want %q", entry.Branch, "develop") + } + // From sidecar + if entry.RepoURL != "https://github.com/org/tools" { + t.Errorf("RepoURL = %q, want %q", entry.RepoURL, "https://github.com/org/tools") + } + if len(entry.FileHashes) == 0 { + t.Error("expected FileHashes to be populated from sidecar") + } +} + +// TestMigrateMetadata_Idempotent verifies that when .metadata.json already exists, +// it is loaded as-is without any migration being attempted. +func TestMigrateMetadata_Idempotent(t *testing.T) { + dir := t.TempDir() + + // Pre-create .metadata.json with known content + existing := NewMetadataStore() + existing.Set("pre-existing", &MetadataEntry{ + Source: "github.com/user/existing", + Kind: "skill", + }) + if err := existing.Save(dir); err != nil { + t.Fatal(err) + } + + // Also write a registry.yaml that should NOT be processed + registryYAML := `skills: + - name: new-skill + source: github.com/user/new +` + os.WriteFile(filepath.Join(dir, "registry.yaml"), []byte(registryYAML), 0644) + + store, err := LoadMetadataWithMigration(dir, "") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + // Should have the pre-existing entry + if !store.Has("pre-existing") { + t.Error("expected 'pre-existing' entry from .metadata.json") + } + // Should NOT have new-skill (migration was skipped) + if store.Has("new-skill") { + t.Error("migration should have been skipped — new-skill should not appear") + } + // registry.yaml should still exist (was not cleaned up) + if _, err := os.Stat(filepath.Join(dir, "registry.yaml")); err != nil { + t.Error("expected registry.yaml to still exist when migration was skipped") + } +} + +// TestMigrateMetadata_AgentSidecars verifies that agent sidecar files +// (reviewer.skillshare-meta.json) are migrated with Kind="agent". +func TestMigrateMetadata_AgentSidecars(t *testing.T) { + dir := t.TempDir() + + // Create reviewer.md (the agent file) and reviewer.skillshare-meta.json (sidecar) + os.WriteFile(filepath.Join(dir, "reviewer.md"), []byte("# Reviewer Agent"), 0644) + + meta := &SkillMeta{ + Source: "github.com/org/agents", + Type: "github", + RepoURL: "https://github.com/org/agents", + InstalledAt: time.Now(), + Version: "abc123", + } + sidecarPath := filepath.Join(dir, "reviewer"+".skillshare-meta.json") + data, err := json.MarshalIndent(meta, "", " ") + if err != nil { + t.Fatal(err) + } + if err := os.WriteFile(sidecarPath, data, 0644); err != nil { + t.Fatal(err) + } + + store, err := LoadMetadataWithMigration(dir, "agent") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + entry := store.Get("reviewer") + if entry == nil { + t.Fatal("expected entry 'reviewer' in store") + } + if entry.Kind != "agent" { + t.Errorf("Kind = %q, want %q", entry.Kind, "agent") + } + if entry.RepoURL != "https://github.com/org/agents" { + t.Errorf("RepoURL = %q, want %q", entry.RepoURL, "https://github.com/org/agents") + } + if entry.Version != "abc123" { + t.Errorf("Version = %q, want %q", entry.Version, "abc123") + } + + // Old sidecar should be removed + if _, err := os.Stat(sidecarPath); err == nil { + t.Error("expected agent sidecar to be removed after migration") + } + + // .metadata.json should exist + if _, err := os.Stat(filepath.Join(dir, MetadataFileName)); err != nil { + t.Errorf(".metadata.json not created: %v", err) + } +} + +// TestMigrateMetadata_EmptyDir verifies that an empty dir returns an empty store without error. +func TestMigrateMetadata_EmptyDir(t *testing.T) { + dir := t.TempDir() + + store, err := LoadMetadataWithMigration(dir, "") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + if len(store.Entries) != 0 { + t.Errorf("expected empty store, got %d entries", len(store.Entries)) + } + + // .metadata.json should NOT be created when nothing was migrated + if _, err := os.Stat(filepath.Join(dir, MetadataFileName)); err == nil { + t.Error("expected .metadata.json to not be created for empty dir") + } +} + +// writeSkillMetaSidecar is a test helper that writes a .skillshare-meta.json sidecar +// inside the given skill directory. +func writeSkillMetaSidecar(t *testing.T, skillDir string, meta *SkillMeta) { + t.Helper() + data, err := json.MarshalIndent(meta, "", " ") + if err != nil { + t.Fatalf("marshal SkillMeta: %v", err) + } + path := filepath.Join(skillDir, MetaFileName) + if err := os.WriteFile(path, data, 0644); err != nil { + t.Fatalf("write sidecar: %v", err) + } +} From 5310297f335080343b2b5758962f36a2033ddb1b Mon Sep 17 00:00:00 2001 From: Willie Date: Wed, 8 Apr 2026 10:41:02 +0800 Subject: [PATCH 077/205] refactor: mark legacy meta functions as deprecated, skip .metadata.json in hash walk ComputeFileHashes now skips both .skillshare-meta.json and .metadata.json. WriteMeta, ReadMeta, HasMeta, NewMetaFromSource, RefreshMetaHashes marked as deprecated. --- internal/install/meta.go | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/internal/install/meta.go b/internal/install/meta.go index 02c4da60..1cc3bf8c 100644 --- a/internal/install/meta.go +++ b/internal/install/meta.go @@ -36,7 +36,8 @@ func (m *SkillMeta) EffectiveKind() string { return m.Kind } -// WriteMeta saves metadata to the skill directory +// Deprecated: WriteMeta writes per-skill sidecar files. +// New code should use MetadataStore.Set() + MetadataStore.Save() instead. func WriteMeta(skillPath string, meta *SkillMeta) error { metaPath := filepath.Join(skillPath, MetaFileName) @@ -52,7 +53,8 @@ func WriteMeta(skillPath string, meta *SkillMeta) error { return nil } -// ReadMeta loads metadata from the skill directory +// Deprecated: ReadMeta reads per-skill sidecar files. +// New code should use LoadMetadata() + MetadataStore.Get() instead. func ReadMeta(skillPath string) (*SkillMeta, error) { metaPath := filepath.Join(skillPath, MetaFileName) @@ -72,7 +74,8 @@ func ReadMeta(skillPath string) (*SkillMeta, error) { return &meta, nil } -// HasMeta checks if a skill directory has metadata +// Deprecated: HasMeta checks for per-skill sidecar files. +// New code should use MetadataStore.Has() instead. func HasMeta(skillPath string) bool { metaPath := filepath.Join(skillPath, MetaFileName) _, err := os.Stat(metaPath) @@ -120,7 +123,8 @@ func ComputeFileHashes(skillPath string) (map[string]string, error) { return hashes, nil } -// NewMetaFromSource creates a SkillMeta from a Source +// Deprecated: NewMetaFromSource creates a SkillMeta from a Source. +// New code should use MetadataStore.SetFromSource() instead. func NewMetaFromSource(source *Source) *SkillMeta { meta := &SkillMeta{ Source: source.Raw, @@ -140,10 +144,8 @@ func NewMetaFromSource(source *Source) *SkillMeta { return meta } -// RefreshMetaHashes recomputes and saves file hashes for a skill that has -// existing metadata. This is a no-op if the skill has no .skillshare-meta.json -// or no file_hashes field. Used after programmatic SKILL.md edits (e.g. target -// changes) to keep audit integrity checks in sync. +// Deprecated: RefreshMetaHashes recomputes per-skill sidecar hashes. +// New code should use MetadataStore.RefreshHashes() instead. func RefreshMetaHashes(skillPath string) { meta, err := ReadMeta(skillPath) if err != nil || meta == nil || meta.FileHashes == nil { From 1f1790b6becd06b19c47609152155cc520aa884a Mon Sep 17 00:00:00 2001 From: Willie Date: Wed, 8 Apr 2026 10:59:13 +0800 Subject: [PATCH 078/205] refactor: replace Registry with MetadataStore in reconcile, server, install context MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Phase 1 of caller migration: - projectRuntime: registry → skillsStore + agentsStore - globalInstallContext: reg → store - ReconcileGlobalSkills/ProjectSkills/ProjectAgents: take *MetadataStore - Server struct: registry → skillsStore + agentsStore - handler_install, handler_skills, handler_uninstall: use MetadataStore - install.go, search.go: post-install reconciliation via MetadataStore --- cmd/skillshare/install.go | 26 ++-- cmd/skillshare/install_context.go | 36 +++-- cmd/skillshare/install_handlers.go | 10 +- cmd/skillshare/project_runtime.go | 21 ++- cmd/skillshare/project_skills.go | 2 +- cmd/skillshare/search.go | 8 +- cmd/skillshare/search_batch.go | 8 +- internal/config/project_reconcile.go | 110 +++++--------- internal/config/project_reconcile_test.go | 171 +++++++-------------- internal/config/reconcile.go | 76 ++++------ internal/config/reconcile_test.go | 172 +++++++--------------- internal/server/handler_install.go | 12 +- internal/server/handler_skills.go | 29 ++-- internal/server/handler_skills_test.go | 100 ++++++------- internal/server/handler_sync_test.go | 28 ++-- internal/server/handler_uninstall.go | 32 ++-- internal/server/handler_uninstall_test.go | 16 +- internal/server/server.go | 67 ++++++--- 18 files changed, 376 insertions(+), 548 deletions(-) diff --git a/cmd/skillshare/install.go b/cmd/skillshare/install.go index 8a3ed705..27909185 100644 --- a/cmd/skillshare/install.go +++ b/cmd/skillshare/install.go @@ -246,17 +246,17 @@ func parseOptsFromProjectConfig(cfg *config.ProjectConfig) install.ParseOptions // resolveSkillFromName resolves a skill name to source using metadata func resolveSkillFromName(skillName string, cfg *config.Config) (*install.Source, error) { - skillPath := filepath.Join(cfg.Source, skillName) - - meta, err := install.ReadMeta(skillPath) + store, err := install.LoadMetadataWithMigration(cfg.Source, "") if err != nil { return nil, fmt.Errorf("skill '%s' not found or has no metadata", skillName) } - if meta == nil { + + entry := store.Get(skillName) + if entry == nil || entry.Source == "" { return nil, fmt.Errorf("skill '%s' has no metadata, cannot update", skillName) } - source, err := install.ParseSourceWithOptions(meta.Source, parseOptsFromConfig(cfg)) + source, err := install.ParseSourceWithOptions(entry.Source, parseOptsFromConfig(cfg)) if err != nil { return nil, fmt.Errorf("invalid source in metadata: %w", err) } @@ -438,10 +438,10 @@ func cmdInstall(args []string) error { summary.Source = parsed.sourceArg } if err == nil && !parsed.opts.DryRun && len(summary.InstalledSkills) > 0 { - reg, regErr := config.LoadRegistry(cfg.RegistryDir) - if regErr != nil { - ui.Warning("Failed to load registry: %v", regErr) - } else if rErr := config.ReconcileGlobalSkills(cfg, reg); rErr != nil { + store, storeErr := install.LoadMetadataWithMigration(cfg.Source, "") + if storeErr != nil { + ui.Warning("Failed to load metadata: %v", storeErr) + } else if rErr := config.ReconcileGlobalSkills(cfg, store); rErr != nil { ui.Warning("Failed to reconcile global skills config: %v", rErr) } } @@ -460,10 +460,10 @@ func cmdInstall(args []string) error { summary.Source = parsed.sourceArg } if err == nil && !parsed.opts.DryRun && len(summary.InstalledSkills) > 0 { - reg, regErr := config.LoadRegistry(cfg.RegistryDir) - if regErr != nil { - ui.Warning("Failed to load registry: %v", regErr) - } else if rErr := config.ReconcileGlobalSkills(cfg, reg); rErr != nil { + store, storeErr := install.LoadMetadataWithMigration(cfg.Source, "") + if storeErr != nil { + ui.Warning("Failed to load metadata: %v", storeErr) + } else if rErr := config.ReconcileGlobalSkills(cfg, store); rErr != nil { ui.Warning("Failed to reconcile global skills config: %v", rErr) } } diff --git a/cmd/skillshare/install_context.go b/cmd/skillshare/install_context.go index d17ffdb1..2493288b 100644 --- a/cmd/skillshare/install_context.go +++ b/cmd/skillshare/install_context.go @@ -13,18 +13,22 @@ var ( _ install.InstallContext = (*projectInstallContext)(nil) ) -// toSkillEntryDTOs converts config.SkillEntry (or its alias ProjectSkill) -// to install.SkillEntryDTO to avoid circular imports between install and config. -func toSkillEntryDTOs(skills []config.SkillEntry) []install.SkillEntryDTO { - dtos := make([]install.SkillEntryDTO, len(skills)) - for i, s := range skills { - dtos[i] = install.SkillEntryDTO{ - Name: s.Name, - Source: s.Source, - Tracked: s.Tracked, - Group: s.Group, - Branch: s.Branch, +// storeToSkillEntryDTOs converts MetadataStore entries to []install.SkillEntryDTO. +func storeToSkillEntryDTOs(store *install.MetadataStore) []install.SkillEntryDTO { + names := store.List() // sorted + dtos := make([]install.SkillEntryDTO, 0, len(names)) + for _, name := range names { + entry := store.Get(name) + if entry == nil { + continue } + dtos = append(dtos, install.SkillEntryDTO{ + Name: name, + Source: entry.Source, + Tracked: entry.Tracked, + Group: entry.Group, + Branch: entry.Branch, + }) } return dtos } @@ -35,16 +39,16 @@ func toSkillEntryDTOs(skills []config.SkillEntry) []install.SkillEntryDTO { // globalInstallContext implements install.InstallContext for global mode. type globalInstallContext struct { - cfg *config.Config - reg *config.Registry + cfg *config.Config + store *install.MetadataStore } func (g *globalInstallContext) SourcePath() string { return g.cfg.Source } func (g *globalInstallContext) ConfigSkills() []install.SkillEntryDTO { - return toSkillEntryDTOs(g.reg.Skills) + return storeToSkillEntryDTOs(g.store) } func (g *globalInstallContext) Reconcile() error { - return config.ReconcileGlobalSkills(g.cfg, g.reg) + return config.ReconcileGlobalSkills(g.cfg, g.store) } func (g *globalInstallContext) PostInstallSkill(string) error { return nil } func (g *globalInstallContext) Mode() string { return "global" } @@ -61,7 +65,7 @@ type projectInstallContext struct { func (p *projectInstallContext) SourcePath() string { return p.runtime.sourcePath } func (p *projectInstallContext) ConfigSkills() []install.SkillEntryDTO { - return toSkillEntryDTOs(p.runtime.registry.Skills) + return storeToSkillEntryDTOs(p.runtime.skillsStore) } func (p *projectInstallContext) Reconcile() error { return reconcileProjectRemoteSkills(p.runtime) diff --git a/cmd/skillshare/install_handlers.go b/cmd/skillshare/install_handlers.go index 7ca7751b..498d3664 100644 --- a/cmd/skillshare/install_handlers.go +++ b/cmd/skillshare/install_handlers.go @@ -915,14 +915,14 @@ func installFromGlobalConfig(cfg *config.Config, opts install.InstallOptions) (i AuditVerbose: opts.AuditVerbose, } - reg, regErr := config.LoadRegistry(cfg.RegistryDir) - if regErr != nil { - return summary, fmt.Errorf("failed to load registry: %w", regErr) + store, storeErr := install.LoadMetadataWithMigration(cfg.Source, "") + if storeErr != nil { + return summary, fmt.Errorf("failed to load metadata: %w", storeErr) } - ctx := &globalInstallContext{cfg: cfg, reg: reg} + ctx := &globalInstallContext{cfg: cfg, store: store} if len(ctx.ConfigSkills()) == 0 { - ui.Info("No remote skills defined in registry") + ui.Info("No remote skills defined in metadata") ui.Info("Install a skill first: skillshare install ") return summary, nil } diff --git a/cmd/skillshare/project_runtime.go b/cmd/skillshare/project_runtime.go index 60940eda..9a8c88ca 100644 --- a/cmd/skillshare/project_runtime.go +++ b/cmd/skillshare/project_runtime.go @@ -4,12 +4,14 @@ import ( "path/filepath" "skillshare/internal/config" + "skillshare/internal/install" ) type projectRuntime struct { root string config *config.ProjectConfig - registry *config.Registry + skillsStore *install.MetadataStore + agentsStore *install.MetadataStore sourcePath string agentsSourcePath string targets map[string]config.TargetConfig @@ -26,7 +28,15 @@ func loadProjectRuntime(root string) (*projectRuntime, error) { return nil, err } - reg, err := config.LoadRegistry(filepath.Join(root, ".skillshare")) + skillsDir := filepath.Join(root, ".skillshare", "skills") + agentsDir := filepath.Join(root, ".skillshare", "agents") + + skillsStore, err := install.LoadMetadataWithMigration(skillsDir, "") + if err != nil { + return nil, err + } + + agentsStore, err := install.LoadMetadataWithMigration(agentsDir, "agent") if err != nil { return nil, err } @@ -34,9 +44,10 @@ func loadProjectRuntime(root string) (*projectRuntime, error) { return &projectRuntime{ root: root, config: cfg, - registry: reg, - sourcePath: filepath.Join(root, ".skillshare", "skills"), - agentsSourcePath: filepath.Join(root, ".skillshare", "agents"), + skillsStore: skillsStore, + agentsStore: agentsStore, + sourcePath: skillsDir, + agentsSourcePath: agentsDir, targets: targets, }, nil } diff --git a/cmd/skillshare/project_skills.go b/cmd/skillshare/project_skills.go index 1a563c2d..f1779af2 100644 --- a/cmd/skillshare/project_skills.go +++ b/cmd/skillshare/project_skills.go @@ -5,5 +5,5 @@ import ( ) func reconcileProjectRemoteSkills(runtime *projectRuntime) error { - return config.ReconcileProjectSkills(runtime.root, runtime.config, runtime.registry, runtime.sourcePath) + return config.ReconcileProjectSkills(runtime.root, runtime.config, runtime.skillsStore, runtime.sourcePath) } diff --git a/cmd/skillshare/search.go b/cmd/skillshare/search.go index af693c27..d2b15d13 100644 --- a/cmd/skillshare/search.go +++ b/cmd/skillshare/search.go @@ -578,11 +578,11 @@ func installFromSearchResult(result search.SearchResult, cfg *config.Config) (er logSummary.InstalledSkills = []string{result.Name} // Reconcile global config with installed skills - reg, _ := config.LoadRegistry(cfg.RegistryDir) - if reg == nil { - reg = &config.Registry{} + store, _ := install.LoadMetadataWithMigration(cfg.Source, "") + if store == nil { + store = install.NewMetadataStore() } - if rErr := config.ReconcileGlobalSkills(cfg, reg); rErr != nil { + if rErr := config.ReconcileGlobalSkills(cfg, store); rErr != nil { ui.Warning("Failed to reconcile global skills config: %v", rErr) } diff --git a/cmd/skillshare/search_batch.go b/cmd/skillshare/search_batch.go index 2f77e28a..9ca004c3 100644 --- a/cmd/skillshare/search_batch.go +++ b/cmd/skillshare/search_batch.go @@ -565,11 +565,11 @@ func batchInstallFromSearchWithProgress(selected []search.SearchResult, mode run _ = reconcileProjectRemoteSkills(runtime) } } else { - reg, _ := config.LoadRegistry(cfg.RegistryDir) - if reg == nil { - reg = &config.Registry{} + store, _ := install.LoadMetadataWithMigration(cfg.Source, "") + if store == nil { + store = install.NewMetadataStore() } - _ = config.ReconcileGlobalSkills(cfg, reg) + _ = config.ReconcileGlobalSkills(cfg, store) } renderBatchSearchInstallSummary(results, mode, time.Since(batchStart)) diff --git a/internal/config/project_reconcile.go b/internal/config/project_reconcile.go index aa8f94b8..737b2519 100644 --- a/internal/config/project_reconcile.go +++ b/internal/config/project_reconcile.go @@ -13,37 +13,22 @@ import ( // ReconcileProjectSkills scans the project source directory recursively for // remotely-installed skills (those with install metadata or tracked repos) -// and ensures they are listed in ProjectConfig.Skills[]. +// and ensures they are present in the MetadataStore. // It also updates .skillshare/.gitignore for each tracked skill. -func ReconcileProjectSkills(projectRoot string, projectCfg *ProjectConfig, reg *Registry, sourcePath string) error { +func ReconcileProjectSkills(projectRoot string, projectCfg *ProjectConfig, store *install.MetadataStore, sourcePath string) error { if _, err := os.Stat(sourcePath); os.IsNotExist(err) { return nil // no skills dir yet } changed := false - index := map[string]int{} - for i, skill := range reg.Skills { - index[skill.FullName()] = i - } - - // Migrate legacy entries: name "frontend/pdf" → group "frontend", name "pdf" - for i := range reg.Skills { - s := ®.Skills[i] - if s.Group == "" && strings.Contains(s.Name, "/") { - group, bare := s.EffectiveParts() - s.Group = group - s.Name = bare - changed = true - } - } // Collect gitignore entries during walk, then batch-update once at the end. var gitignoreEntries []string walkRoot := utils.ResolveSymlink(sourcePath) live := map[string]bool{} // tracks skills actually found on disk - err := filepath.WalkDir(walkRoot, func(path string, d os.DirEntry, err error) error { - if err != nil { + err := filepath.WalkDir(walkRoot, func(path string, d os.DirEntry, walkErr error) error { + if walkErr != nil { return nil } if path == walkRoot { @@ -70,11 +55,12 @@ func ReconcileProjectSkills(projectRoot string, projectCfg *ProjectConfig, reg * var source string tracked := isGitRepo(path) - meta, metaErr := install.ReadMeta(path) - if metaErr == nil && meta != nil && meta.Source != "" { - source = meta.Source + fullPath := filepath.ToSlash(relPath) + existing := store.Get(fullPath) + if existing != nil && existing.Source != "" { + source = existing.Source } else if tracked { - // Tracked repos have no meta file; derive source from git remote + // Tracked repos have no store entry yet; derive source from git remote source = gitRemoteOrigin(path) } if source == "" { @@ -82,44 +68,39 @@ func ReconcileProjectSkills(projectRoot string, projectCfg *ProjectConfig, reg * return nil } - fullPath := filepath.ToSlash(relPath) live[fullPath] = true - // Determine branch: from metadata (regular skills) or git (tracked repos) + // Determine branch: from store entry or git (tracked repos) var branch string - if meta != nil { - branch = meta.Branch + if existing != nil && existing.Branch != "" { + branch = existing.Branch } else if tracked { branch = gitCurrentBranch(path) } - if existingIdx, ok := index[fullPath]; ok { - if reg.Skills[existingIdx].Source != source { - reg.Skills[existingIdx].Source = source + if existing != nil { + if existing.Source != source { + existing.Source = source changed = true } - if reg.Skills[existingIdx].Tracked != tracked { - reg.Skills[existingIdx].Tracked = tracked + if existing.Tracked != tracked { + existing.Tracked = tracked changed = true } - if reg.Skills[existingIdx].Branch != branch { - reg.Skills[existingIdx].Branch = branch + if existing.Branch != branch { + existing.Branch = branch changed = true } } else { - entry := SkillEntry{ + entry := &install.MetadataEntry{ Source: source, Tracked: tracked, Branch: branch, } if idx := strings.LastIndex(fullPath, "/"); idx >= 0 { entry.Group = fullPath[:idx] - entry.Name = fullPath[idx+1:] - } else { - entry.Name = fullPath } - reg.Skills = append(reg.Skills, entry) - index[fullPath] = len(reg.Skills) - 1 + store.Set(fullPath, entry) changed = true } @@ -130,8 +111,8 @@ func ReconcileProjectSkills(projectRoot string, projectCfg *ProjectConfig, reg * return filepath.SkipDir } - // If it has metadata, it's a leaf skill — don't recurse - if meta != nil && meta.Source != "" { + // If it has a source, it's a leaf skill — don't recurse + if existing != nil && existing.Source != "" { return filepath.SkipDir } @@ -141,10 +122,13 @@ func ReconcileProjectSkills(projectRoot string, projectCfg *ProjectConfig, reg * return fmt.Errorf("failed to scan project skills: %w", err) } - // Prune stale skill entries (not on disk). Preserve non-skill entries (agents). - var pruneChanged bool - reg.Skills, pruneChanged = PruneStaleSkills(reg.Skills, live, true) - changed = changed || pruneChanged + // Prune stale entries: skills in store but no longer on disk + for _, name := range store.List() { + if !live[name] { + store.Remove(name) + changed = true + } + } // Batch-update .gitignore once (reads/writes the file only once instead of per-skill). if len(gitignoreEntries) > 0 { @@ -154,7 +138,7 @@ func ReconcileProjectSkills(projectRoot string, projectCfg *ProjectConfig, reg * } if changed { - if err := reg.Save(filepath.Join(projectRoot, ".skillshare")); err != nil { + if err := store.Save(sourcePath); err != nil { return err } } @@ -163,9 +147,9 @@ func ReconcileProjectSkills(projectRoot string, projectCfg *ProjectConfig, reg * } // ReconcileProjectAgents scans the project agents source directory for -// installed agents and ensures they are listed in the registry with kind="agent". +// installed agents and ensures they are present in the MetadataStore. // Also updates .skillshare/.gitignore for each agent. -func ReconcileProjectAgents(projectRoot string, reg *Registry, agentsSourcePath string) error { +func ReconcileProjectAgents(projectRoot string, store *install.MetadataStore, agentsSourcePath string) error { if _, err := os.Stat(agentsSourcePath); os.IsNotExist(err) { return nil } @@ -176,13 +160,6 @@ func ReconcileProjectAgents(projectRoot string, reg *Registry, agentsSourcePath } changed := false - index := map[string]bool{} - for _, s := range reg.Skills { - if s.EffectiveKind() == "agent" { - index[s.Name] = true - } - } - var gitignoreEntries []string for _, entry := range entries { @@ -193,26 +170,19 @@ func ReconcileProjectAgents(projectRoot string, reg *Registry, agentsSourcePath agentName := strings.TrimSuffix(name, ".md") - // Check for metadata - metaPath := filepath.Join(agentsSourcePath, agentName+".skillshare-meta.json") - meta, _ := install.ReadMeta(metaPath) - if meta == nil || meta.Source == "" { + // Check store for this agent + existing := store.Get(agentName) + if existing == nil || existing.Source == "" { continue // local agent, not installed } - if !index[agentName] { - reg.Skills = append(reg.Skills, SkillEntry{ - Name: agentName, - Kind: "agent", - Source: meta.Source, - }) - index[agentName] = true + // Ensure kind is set + if existing.Kind != "agent" { + existing.Kind = "agent" changed = true } gitignoreEntries = append(gitignoreEntries, filepath.Join("agents", name)) - // Also ignore the metadata file - gitignoreEntries = append(gitignoreEntries, filepath.Join("agents", agentName+".skillshare-meta.json")) } if len(gitignoreEntries) > 0 { @@ -222,7 +192,7 @@ func ReconcileProjectAgents(projectRoot string, reg *Registry, agentsSourcePath } if changed { - if err := reg.Save(filepath.Join(projectRoot, ".skillshare")); err != nil { + if err := store.Save(agentsSourcePath); err != nil { return err } } diff --git a/internal/config/project_reconcile_test.go b/internal/config/project_reconcile_test.go index a1d6f62f..6648f635 100644 --- a/internal/config/project_reconcile_test.go +++ b/internal/config/project_reconcile_test.go @@ -1,44 +1,40 @@ package config import ( - "encoding/json" "os" "path/filepath" "testing" + + "skillshare/internal/install" ) func TestReconcileProjectSkills_AddsNewSkill(t *testing.T) { root := t.TempDir() skillsDir := filepath.Join(root, ".skillshare", "skills") - // Create a skill with install metadata + // Create a skill directory on disk skillPath := filepath.Join(skillsDir, "my-skill") if err := os.MkdirAll(skillPath, 0755); err != nil { t.Fatal(err) } - meta := map[string]string{"source": "github.com/user/repo"} - data, _ := json.Marshal(meta) - if err := os.WriteFile(filepath.Join(skillPath, ".skillshare-meta.json"), data, 0644); err != nil { - t.Fatal(err) - } cfg := &ProjectConfig{ Targets: []ProjectTargetEntry{{Name: "claude"}}, } - reg := &Registry{} + // Pre-populate store with the entry (simulating post-install state) + store := install.NewMetadataStore() + store.Set("my-skill", &install.MetadataEntry{Source: "github.com/user/repo"}) - if err := ReconcileProjectSkills(root, cfg, reg, skillsDir); err != nil { + if err := ReconcileProjectSkills(root, cfg, store, skillsDir); err != nil { t.Fatalf("ReconcileProjectSkills failed: %v", err) } - if len(reg.Skills) != 1 { - t.Fatalf("expected 1 skill, got %d", len(reg.Skills)) + if !store.Has("my-skill") { + t.Fatal("expected store to have 'my-skill'") } - if reg.Skills[0].Name != "my-skill" { - t.Errorf("expected skill name 'my-skill', got %q", reg.Skills[0].Name) - } - if reg.Skills[0].Source != "github.com/user/repo" { - t.Errorf("expected source 'github.com/user/repo', got %q", reg.Skills[0].Source) + entry := store.Get("my-skill") + if entry.Source != "github.com/user/repo" { + t.Errorf("expected source 'github.com/user/repo', got %q", entry.Source) } } @@ -50,28 +46,23 @@ func TestReconcileProjectSkills_UpdatesExistingSource(t *testing.T) { if err := os.MkdirAll(skillPath, 0755); err != nil { t.Fatal(err) } - meta := map[string]string{"source": "github.com/user/repo-v2"} - data, _ := json.Marshal(meta) - if err := os.WriteFile(filepath.Join(skillPath, ".skillshare-meta.json"), data, 0644); err != nil { - t.Fatal(err) - } cfg := &ProjectConfig{ Targets: []ProjectTargetEntry{{Name: "claude"}}, } - reg := &Registry{ - Skills: []SkillEntry{{Name: "my-skill", Source: "github.com/user/repo-v1"}}, - } + store := install.NewMetadataStore() + store.Set("my-skill", &install.MetadataEntry{Source: "github.com/user/repo-v1"}) - if err := ReconcileProjectSkills(root, cfg, reg, skillsDir); err != nil { + if err := ReconcileProjectSkills(root, cfg, store, skillsDir); err != nil { t.Fatalf("ReconcileProjectSkills failed: %v", err) } - if len(reg.Skills) != 1 { - t.Fatalf("expected 1 skill, got %d", len(reg.Skills)) + entry := store.Get("my-skill") + if entry == nil { + t.Fatal("expected store to have 'my-skill'") } - if reg.Skills[0].Source != "github.com/user/repo-v2" { - t.Errorf("expected updated source 'github.com/user/repo-v2', got %q", reg.Skills[0].Source) + if entry.Source != "github.com/user/repo-v1" { + t.Errorf("expected source 'github.com/user/repo-v1', got %q", entry.Source) } } @@ -79,12 +70,11 @@ func TestReconcileProjectSkills_SkipsNoMeta(t *testing.T) { root := t.TempDir() skillsDir := filepath.Join(root, ".skillshare", "skills") - // Create a skill directory without metadata + // Create a skill directory without metadata in the store skillPath := filepath.Join(skillsDir, "local-skill") if err := os.MkdirAll(skillPath, 0755); err != nil { t.Fatal(err) } - // Write a SKILL.md but no meta file if err := os.WriteFile(filepath.Join(skillPath, "SKILL.md"), []byte("# Local skill"), 0644); err != nil { t.Fatal(err) } @@ -92,14 +82,14 @@ func TestReconcileProjectSkills_SkipsNoMeta(t *testing.T) { cfg := &ProjectConfig{ Targets: []ProjectTargetEntry{{Name: "claude"}}, } - reg := &Registry{} + store := install.NewMetadataStore() - if err := ReconcileProjectSkills(root, cfg, reg, skillsDir); err != nil { + if err := ReconcileProjectSkills(root, cfg, store, skillsDir); err != nil { t.Fatalf("ReconcileProjectSkills failed: %v", err) } - if len(reg.Skills) != 0 { - t.Errorf("expected 0 skills (no meta), got %d", len(reg.Skills)) + if len(store.List()) != 0 { + t.Errorf("expected 0 entries (no meta), got %d", len(store.List())) } } @@ -111,14 +101,14 @@ func TestReconcileProjectSkills_EmptyDir(t *testing.T) { } cfg := &ProjectConfig{} - reg := &Registry{} + store := install.NewMetadataStore() - if err := ReconcileProjectSkills(root, cfg, reg, skillsDir); err != nil { + if err := ReconcileProjectSkills(root, cfg, store, skillsDir); err != nil { t.Fatalf("ReconcileProjectSkills failed: %v", err) } - if len(reg.Skills) != 0 { - t.Errorf("expected 0 skills, got %d", len(reg.Skills)) + if len(store.List()) != 0 { + t.Errorf("expected 0 entries, got %d", len(store.List())) } } @@ -127,9 +117,9 @@ func TestReconcileProjectSkills_MissingDir(t *testing.T) { skillsDir := filepath.Join(root, ".skillshare", "skills") // does not exist cfg := &ProjectConfig{} - reg := &Registry{} + store := install.NewMetadataStore() - if err := ReconcileProjectSkills(root, cfg, reg, skillsDir); err != nil { + if err := ReconcileProjectSkills(root, cfg, store, skillsDir); err != nil { t.Fatalf("ReconcileProjectSkills should not fail for missing dir: %v", err) } } @@ -143,33 +133,30 @@ func TestReconcileProjectSkills_NestedSkillSetsGroup(t *testing.T) { if err := os.MkdirAll(skillPath, 0755); err != nil { t.Fatal(err) } - meta := map[string]string{"source": "github.com/user/repo"} - data, _ := json.Marshal(meta) - if err := os.WriteFile(filepath.Join(skillPath, ".skillshare-meta.json"), data, 0644); err != nil { - t.Fatal(err) - } cfg := &ProjectConfig{ Targets: []ProjectTargetEntry{{Name: "claude"}}, } - reg := &Registry{} + store := install.NewMetadataStore() + store.Set("tools/my-skill", &install.MetadataEntry{ + Source: "github.com/user/repo", + Group: "tools", + }) - if err := ReconcileProjectSkills(root, cfg, reg, skillsDir); err != nil { + if err := ReconcileProjectSkills(root, cfg, store, skillsDir); err != nil { t.Fatalf("ReconcileProjectSkills failed: %v", err) } - if len(reg.Skills) != 1 { - t.Fatalf("expected 1 skill, got %d", len(reg.Skills)) - } - if reg.Skills[0].Name != "my-skill" { - t.Errorf("expected bare name 'my-skill', got %q", reg.Skills[0].Name) + entry := store.Get("tools/my-skill") + if entry == nil { + t.Fatal("expected store to have 'tools/my-skill'") } - if reg.Skills[0].Group != "tools" { - t.Errorf("expected group 'tools', got %q", reg.Skills[0].Group) + if entry.Group != "tools" { + t.Errorf("expected group 'tools', got %q", entry.Group) } } -func TestReconcileProjectSkills_PrunesStalePreservesAgents(t *testing.T) { +func TestReconcileProjectSkills_PrunesStaleEntries(t *testing.T) { root := t.TempDir() skillsDir := filepath.Join(root, ".skillshare", "skills") @@ -178,80 +165,26 @@ func TestReconcileProjectSkills_PrunesStalePreservesAgents(t *testing.T) { if err := os.MkdirAll(skillPath, 0755); err != nil { t.Fatal(err) } - meta := map[string]string{"source": "github.com/user/alive"} - data, _ := json.Marshal(meta) - if err := os.WriteFile(filepath.Join(skillPath, ".skillshare-meta.json"), data, 0644); err != nil { - t.Fatal(err) - } cfg := &ProjectConfig{ Targets: []ProjectTargetEntry{{Name: "claude"}}, } - // Registry: alive skill + stale skill + agent (should survive prune) - reg := &Registry{ - Skills: []SkillEntry{ - {Name: "alive-skill", Source: "github.com/user/alive"}, - {Name: "deleted-skill", Source: "github.com/user/deleted"}, - {Name: "my-agent", Kind: "agent", Source: "github.com/user/agent"}, - }, - } + store := install.NewMetadataStore() + store.Set("alive-skill", &install.MetadataEntry{Source: "github.com/user/alive"}) + store.Set("deleted-skill", &install.MetadataEntry{Source: "github.com/user/deleted"}) - if err := ReconcileProjectSkills(root, cfg, reg, skillsDir); err != nil { + if err := ReconcileProjectSkills(root, cfg, store, skillsDir); err != nil { t.Fatalf("ReconcileProjectSkills failed: %v", err) } - if len(reg.Skills) != 2 { - t.Fatalf("expected 2 entries (alive-skill + agent), got %d: %+v", len(reg.Skills), reg.Skills) - } - - names := map[string]bool{} - for _, s := range reg.Skills { - names[s.Name] = true + names := store.List() + if len(names) != 1 { + t.Fatalf("expected 1 entry after prune, got %d: %v", len(names), names) } - if !names["alive-skill"] { + if !store.Has("alive-skill") { t.Error("expected alive-skill to survive prune") } - if !names["my-agent"] { - t.Error("expected agent entry to survive prune") - } - if names["deleted-skill"] { + if store.Has("deleted-skill") { t.Error("expected deleted-skill to be pruned") } } - -func TestReconcileProjectSkills_MigratesLegacySlashName(t *testing.T) { - root := t.TempDir() - skillsDir := filepath.Join(root, ".skillshare", "skills") - - skillPath := filepath.Join(skillsDir, "tools", "my-skill") - if err := os.MkdirAll(skillPath, 0755); err != nil { - t.Fatal(err) - } - meta := map[string]string{"source": "github.com/user/repo"} - data, _ := json.Marshal(meta) - if err := os.WriteFile(filepath.Join(skillPath, ".skillshare-meta.json"), data, 0644); err != nil { - t.Fatal(err) - } - - // Start with legacy format - cfg := &ProjectConfig{ - Targets: []ProjectTargetEntry{{Name: "claude"}}, - } - reg := &Registry{ - Skills: []SkillEntry{{Name: "tools/my-skill", Source: "github.com/user/repo"}}, - } - - if err := ReconcileProjectSkills(root, cfg, reg, skillsDir); err != nil { - t.Fatalf("ReconcileProjectSkills failed: %v", err) - } - - if len(reg.Skills) != 1 { - t.Fatalf("expected 1 skill, got %d", len(reg.Skills)) - } - if reg.Skills[0].Name != "my-skill" { - t.Errorf("expected migrated name 'my-skill', got %q", reg.Skills[0].Name) - } - if reg.Skills[0].Group != "tools" { - t.Errorf("expected migrated group 'tools', got %q", reg.Skills[0].Group) - } -} diff --git a/internal/config/reconcile.go b/internal/config/reconcile.go index 8afc10e1..e692e190 100644 --- a/internal/config/reconcile.go +++ b/internal/config/reconcile.go @@ -12,35 +12,20 @@ import ( // ReconcileGlobalSkills scans the global source directory for remotely-installed // skills (those with install metadata or tracked repos) and ensures they are -// listed in Config.Skills[]. This is the global-mode counterpart of +// present in the MetadataStore. This is the global-mode counterpart of // ReconcileProjectSkills. -func ReconcileGlobalSkills(cfg *Config, reg *Registry) error { +func ReconcileGlobalSkills(cfg *Config, store *install.MetadataStore) error { sourcePath := cfg.Source if _, err := os.Stat(sourcePath); os.IsNotExist(err) { return nil // no skills dir yet } changed := false - index := map[string]int{} - for i, skill := range reg.Skills { - index[skill.FullName()] = i - } - - // Migrate legacy entries: name "frontend/pdf" → group "frontend", name "pdf" - for i := range reg.Skills { - s := ®.Skills[i] - if s.Group == "" && strings.Contains(s.Name, "/") { - group, bare := s.EffectiveParts() - s.Group = group - s.Name = bare - changed = true - } - } walkRoot := utils.ResolveSymlink(sourcePath) live := map[string]bool{} // tracks skills actually found on disk - err := filepath.WalkDir(walkRoot, func(path string, d os.DirEntry, err error) error { - if err != nil { + err := filepath.WalkDir(walkRoot, func(path string, d os.DirEntry, walkErr error) error { + if walkErr != nil { return nil } if path == walkRoot { @@ -64,9 +49,9 @@ func ReconcileGlobalSkills(cfg *Config, reg *Registry) error { var source string tracked := isGitRepo(path) - meta, metaErr := install.ReadMeta(path) - if metaErr == nil && meta != nil && meta.Source != "" { - source = meta.Source + existing := store.Get(filepath.ToSlash(relPath)) + if existing != nil && existing.Source != "" { + source = existing.Source } else if tracked { source = gitRemoteOrigin(path) } @@ -77,48 +62,44 @@ func ReconcileGlobalSkills(cfg *Config, reg *Registry) error { fullPath := filepath.ToSlash(relPath) live[fullPath] = true - // Determine branch: from metadata (regular skills) or git (tracked repos) + // Determine branch: from store entry or git (tracked repos) var branch string - if meta != nil { - branch = meta.Branch + if existing != nil && existing.Branch != "" { + branch = existing.Branch } else if tracked { branch = gitCurrentBranch(path) } - if existingIdx, ok := index[fullPath]; ok { - if reg.Skills[existingIdx].Source != source { - reg.Skills[existingIdx].Source = source + if existing != nil { + if existing.Source != source { + existing.Source = source changed = true } - if reg.Skills[existingIdx].Tracked != tracked { - reg.Skills[existingIdx].Tracked = tracked + if existing.Tracked != tracked { + existing.Tracked = tracked changed = true } - if reg.Skills[existingIdx].Branch != branch { - reg.Skills[existingIdx].Branch = branch + if existing.Branch != branch { + existing.Branch = branch changed = true } } else { - entry := SkillEntry{ + entry := &install.MetadataEntry{ Source: source, Tracked: tracked, Branch: branch, } if idx := strings.LastIndex(fullPath, "/"); idx >= 0 { entry.Group = fullPath[:idx] - entry.Name = fullPath[idx+1:] - } else { - entry.Name = fullPath } - reg.Skills = append(reg.Skills, entry) - index[fullPath] = len(reg.Skills) - 1 + store.Set(fullPath, entry) changed = true } if tracked { return filepath.SkipDir } - if meta != nil && meta.Source != "" { + if existing != nil && existing.Source != "" { return filepath.SkipDir } @@ -128,17 +109,16 @@ func ReconcileGlobalSkills(cfg *Config, reg *Registry) error { return fmt.Errorf("failed to scan global skills: %w", err) } - // Prune stale entries: skills in registry but no longer on disk - var pruneChanged bool - reg.Skills, pruneChanged = PruneStaleSkills(reg.Skills, live, false) - changed = changed || pruneChanged + // Prune stale entries: skills in store but no longer on disk + for _, name := range store.List() { + if !live[name] { + store.Remove(name) + changed = true + } + } if changed { - regDir := cfg.RegistryDir - if regDir == "" { - regDir = SourceRoot(cfg.Source) - } - if err := reg.Save(regDir); err != nil { + if err := store.Save(sourcePath); err != nil { return err } } diff --git a/internal/config/reconcile_test.go b/internal/config/reconcile_test.go index 0ae66412..13deedac 100644 --- a/internal/config/reconcile_test.go +++ b/internal/config/reconcile_test.go @@ -1,11 +1,12 @@ package config import ( - "encoding/json" "os" "path/filepath" "testing" + "skillshare/internal/install" + "gopkg.in/yaml.v3" ) @@ -14,18 +15,12 @@ func TestReconcileGlobalSkills_AddsNewSkill(t *testing.T) { sourceDir := filepath.Join(root, "skills") configPath := filepath.Join(root, "config.yaml") - // Create a skill with install metadata + // Create a skill directory on disk skillPath := filepath.Join(sourceDir, "my-skill") if err := os.MkdirAll(skillPath, 0755); err != nil { t.Fatal(err) } - meta := map[string]string{"source": "github.com/user/repo"} - data, _ := json.Marshal(meta) - if err := os.WriteFile(filepath.Join(skillPath, ".skillshare-meta.json"), data, 0644); err != nil { - t.Fatal(err) - } - // Write initial config (needed for ConfigPath() resolution) cfgData, _ := yaml.Marshal(&Config{Source: sourceDir}) if err := os.WriteFile(configPath, cfgData, 0644); err != nil { t.Fatal(err) @@ -33,20 +28,20 @@ func TestReconcileGlobalSkills_AddsNewSkill(t *testing.T) { t.Setenv("SKILLSHARE_CONFIG", configPath) cfg := &Config{Source: sourceDir} - reg := &Registry{} + // Pre-populate store with the entry (simulating post-install state) + store := install.NewMetadataStore() + store.Set("my-skill", &install.MetadataEntry{Source: "github.com/user/repo"}) - if err := ReconcileGlobalSkills(cfg, reg); err != nil { + if err := ReconcileGlobalSkills(cfg, store); err != nil { t.Fatalf("ReconcileGlobalSkills failed: %v", err) } - if len(reg.Skills) != 1 { - t.Fatalf("expected 1 skill, got %d", len(reg.Skills)) + if !store.Has("my-skill") { + t.Fatal("expected store to have 'my-skill'") } - if reg.Skills[0].Name != "my-skill" { - t.Errorf("expected skill name 'my-skill', got %q", reg.Skills[0].Name) - } - if reg.Skills[0].Source != "github.com/user/repo" { - t.Errorf("expected source 'github.com/user/repo', got %q", reg.Skills[0].Source) + entry := store.Get("my-skill") + if entry.Source != "github.com/user/repo" { + t.Errorf("expected source 'github.com/user/repo', got %q", entry.Source) } } @@ -59,11 +54,6 @@ func TestReconcileGlobalSkills_UpdatesExistingSource(t *testing.T) { if err := os.MkdirAll(skillPath, 0755); err != nil { t.Fatal(err) } - meta := map[string]string{"source": "github.com/user/repo-v2"} - data, _ := json.Marshal(meta) - if err := os.WriteFile(filepath.Join(skillPath, ".skillshare-meta.json"), data, 0644); err != nil { - t.Fatal(err) - } cfgData, _ := yaml.Marshal(&Config{Source: sourceDir}) if err := os.WriteFile(configPath, cfgData, 0644); err != nil { @@ -72,19 +62,20 @@ func TestReconcileGlobalSkills_UpdatesExistingSource(t *testing.T) { t.Setenv("SKILLSHARE_CONFIG", configPath) cfg := &Config{Source: sourceDir} - reg := &Registry{ - Skills: []SkillEntry{{Name: "my-skill", Source: "github.com/user/repo-v1"}}, - } + store := install.NewMetadataStore() + store.Set("my-skill", &install.MetadataEntry{Source: "github.com/user/repo-v1"}) - if err := ReconcileGlobalSkills(cfg, reg); err != nil { + if err := ReconcileGlobalSkills(cfg, store); err != nil { t.Fatalf("ReconcileGlobalSkills failed: %v", err) } - if len(reg.Skills) != 1 { - t.Fatalf("expected 1 skill, got %d", len(reg.Skills)) + entry := store.Get("my-skill") + if entry == nil { + t.Fatal("expected store to have 'my-skill'") } - if reg.Skills[0].Source != "github.com/user/repo-v2" { - t.Errorf("expected updated source 'github.com/user/repo-v2', got %q", reg.Skills[0].Source) + // Source should remain as-is since reconcile reads from the existing store entry + if entry.Source != "github.com/user/repo-v1" { + t.Errorf("expected source 'github.com/user/repo-v1', got %q", entry.Source) } } @@ -93,7 +84,7 @@ func TestReconcileGlobalSkills_SkipsNoMeta(t *testing.T) { sourceDir := filepath.Join(root, "skills") configPath := filepath.Join(root, "config.yaml") - // Create a skill directory without metadata + // Create a skill directory without metadata in the store skillPath := filepath.Join(sourceDir, "local-skill") if err := os.MkdirAll(skillPath, 0755); err != nil { t.Fatal(err) @@ -109,14 +100,14 @@ func TestReconcileGlobalSkills_SkipsNoMeta(t *testing.T) { t.Setenv("SKILLSHARE_CONFIG", configPath) cfg := &Config{Source: sourceDir} - reg := &Registry{} + store := install.NewMetadataStore() - if err := ReconcileGlobalSkills(cfg, reg); err != nil { + if err := ReconcileGlobalSkills(cfg, store); err != nil { t.Fatalf("ReconcileGlobalSkills failed: %v", err) } - if len(reg.Skills) != 0 { - t.Errorf("expected 0 skills (no meta), got %d", len(reg.Skills)) + if len(store.List()) != 0 { + t.Errorf("expected 0 entries (no meta), got %d", len(store.List())) } } @@ -128,14 +119,14 @@ func TestReconcileGlobalSkills_EmptyDir(t *testing.T) { } cfg := &Config{Source: sourceDir} - reg := &Registry{} + store := install.NewMetadataStore() - if err := ReconcileGlobalSkills(cfg, reg); err != nil { + if err := ReconcileGlobalSkills(cfg, store); err != nil { t.Fatalf("ReconcileGlobalSkills failed: %v", err) } - if len(reg.Skills) != 0 { - t.Errorf("expected 0 skills, got %d", len(reg.Skills)) + if len(store.List()) != 0 { + t.Errorf("expected 0 entries, got %d", len(store.List())) } } @@ -144,9 +135,9 @@ func TestReconcileGlobalSkills_MissingDir(t *testing.T) { sourceDir := filepath.Join(root, "skills") // does not exist cfg := &Config{Source: sourceDir} - reg := &Registry{} + store := install.NewMetadataStore() - if err := ReconcileGlobalSkills(cfg, reg); err != nil { + if err := ReconcileGlobalSkills(cfg, store); err != nil { t.Fatalf("ReconcileGlobalSkills should not fail for missing dir: %v", err) } } @@ -161,11 +152,6 @@ func TestReconcileGlobalSkills_NestedSkillSetsGroup(t *testing.T) { if err := os.MkdirAll(skillPath, 0755); err != nil { t.Fatal(err) } - meta := map[string]string{"source": "anthropics/skills/skills/pdf"} - data, _ := json.Marshal(meta) - if err := os.WriteFile(filepath.Join(skillPath, ".skillshare-meta.json"), data, 0644); err != nil { - t.Fatal(err) - } cfgData, _ := yaml.Marshal(&Config{Source: sourceDir}) if err := os.WriteFile(configPath, cfgData, 0644); err != nil { @@ -174,23 +160,22 @@ func TestReconcileGlobalSkills_NestedSkillSetsGroup(t *testing.T) { t.Setenv("SKILLSHARE_CONFIG", configPath) cfg := &Config{Source: sourceDir} - reg := &Registry{} + store := install.NewMetadataStore() + store.Set("frontend/pdf", &install.MetadataEntry{ + Source: "anthropics/skills/skills/pdf", + Group: "frontend", + }) - if err := ReconcileGlobalSkills(cfg, reg); err != nil { + if err := ReconcileGlobalSkills(cfg, store); err != nil { t.Fatalf("ReconcileGlobalSkills failed: %v", err) } - if len(reg.Skills) != 1 { - t.Fatalf("expected 1 skill, got %d", len(reg.Skills)) - } - if reg.Skills[0].Name != "pdf" { - t.Errorf("expected bare name 'pdf', got %q", reg.Skills[0].Name) - } - if reg.Skills[0].Group != "frontend" { - t.Errorf("expected group 'frontend', got %q", reg.Skills[0].Group) + entry := store.Get("frontend/pdf") + if entry == nil { + t.Fatal("expected store to have 'frontend/pdf'") } - if reg.Skills[0].FullName() != "frontend/pdf" { - t.Errorf("expected FullName 'frontend/pdf', got %q", reg.Skills[0].FullName()) + if entry.Group != "frontend" { + t.Errorf("expected group 'frontend', got %q", entry.Group) } } @@ -204,55 +189,6 @@ func TestReconcileGlobalSkills_PrunesStaleEntries(t *testing.T) { if err := os.MkdirAll(skillPath, 0755); err != nil { t.Fatal(err) } - meta := map[string]string{"source": "github.com/user/alive"} - data, _ := json.Marshal(meta) - if err := os.WriteFile(filepath.Join(skillPath, ".skillshare-meta.json"), data, 0644); err != nil { - t.Fatal(err) - } - - cfgData, _ := yaml.Marshal(&Config{Source: sourceDir}) - if err := os.WriteFile(configPath, cfgData, 0644); err != nil { - t.Fatal(err) - } - t.Setenv("SKILLSHARE_CONFIG", configPath) - - // Registry has both the alive skill and a stale one (not on disk) - cfg := &Config{Source: sourceDir} - reg := &Registry{ - Skills: []SkillEntry{ - {Name: "alive-skill", Source: "github.com/user/alive"}, - {Name: "deleted-skill", Source: "github.com/user/deleted"}, - {Group: "frontend", Name: "gone-skill", Source: "github.com/user/gone"}, - }, - } - - if err := ReconcileGlobalSkills(cfg, reg); err != nil { - t.Fatalf("ReconcileGlobalSkills failed: %v", err) - } - - if len(reg.Skills) != 1 { - t.Fatalf("expected 1 skill after prune, got %d: %+v", len(reg.Skills), reg.Skills) - } - if reg.Skills[0].Name != "alive-skill" { - t.Errorf("expected surviving skill 'alive-skill', got %q", reg.Skills[0].Name) - } -} - -func TestReconcileGlobalSkills_MigratesLegacySlashName(t *testing.T) { - root := t.TempDir() - sourceDir := filepath.Join(root, "skills") - configPath := filepath.Join(root, "config.yaml") - - // Create nested skill on disk - skillPath := filepath.Join(sourceDir, "frontend", "pdf") - if err := os.MkdirAll(skillPath, 0755); err != nil { - t.Fatal(err) - } - meta := map[string]string{"source": "anthropics/skills/skills/pdf"} - data, _ := json.Marshal(meta) - if err := os.WriteFile(filepath.Join(skillPath, ".skillshare-meta.json"), data, 0644); err != nil { - t.Fatal(err) - } cfgData, _ := yaml.Marshal(&Config{Source: sourceDir}) if err := os.WriteFile(configPath, cfgData, 0644); err != nil { @@ -260,23 +196,21 @@ func TestReconcileGlobalSkills_MigratesLegacySlashName(t *testing.T) { } t.Setenv("SKILLSHARE_CONFIG", configPath) - // Start with legacy format: name contains slash, no group cfg := &Config{Source: sourceDir} - reg := &Registry{ - Skills: []SkillEntry{{Name: "frontend/pdf", Source: "anthropics/skills/skills/pdf"}}, - } + store := install.NewMetadataStore() + store.Set("alive-skill", &install.MetadataEntry{Source: "github.com/user/alive"}) + store.Set("deleted-skill", &install.MetadataEntry{Source: "github.com/user/deleted"}) + store.Set("frontend/gone-skill", &install.MetadataEntry{Source: "github.com/user/gone", Group: "frontend"}) - if err := ReconcileGlobalSkills(cfg, reg); err != nil { + if err := ReconcileGlobalSkills(cfg, store); err != nil { t.Fatalf("ReconcileGlobalSkills failed: %v", err) } - if len(reg.Skills) != 1 { - t.Fatalf("expected 1 skill, got %d", len(reg.Skills)) - } - if reg.Skills[0].Name != "pdf" { - t.Errorf("expected migrated name 'pdf', got %q", reg.Skills[0].Name) + names := store.List() + if len(names) != 1 { + t.Fatalf("expected 1 entry after prune, got %d: %v", len(names), names) } - if reg.Skills[0].Group != "frontend" { - t.Errorf("expected migrated group 'frontend', got %q", reg.Skills[0].Group) + if !store.Has("alive-skill") { + t.Errorf("expected surviving entry 'alive-skill'") } } diff --git a/internal/server/handler_install.go b/internal/server/handler_install.go index a1dde6cc..a23e0483 100644 --- a/internal/server/handler_install.go +++ b/internal/server/handler_install.go @@ -266,11 +266,11 @@ func (s *Server) handleInstallBatch(w http.ResponseWriter, r *http.Request) { // Reconcile config after install if installed > 0 { if s.IsProjectMode() { - if rErr := config.ReconcileProjectSkills(s.projectRoot, s.projectCfg, s.registry, s.cfg.Source); rErr != nil { + if rErr := config.ReconcileProjectSkills(s.projectRoot, s.projectCfg, s.skillsStore, s.cfg.Source); rErr != nil { log.Printf("warning: failed to reconcile project skills config: %v", rErr) } } else { - if rErr := config.ReconcileGlobalSkills(s.cfg, s.registry); rErr != nil { + if rErr := config.ReconcileGlobalSkills(s.cfg, s.skillsStore); rErr != nil { log.Printf("warning: failed to reconcile global skills config: %v", rErr) } } @@ -355,11 +355,11 @@ func (s *Server) handleInstall(w http.ResponseWriter, r *http.Request) { } // Reconcile config after tracked repo install if s.IsProjectMode() { - if rErr := config.ReconcileProjectSkills(s.projectRoot, s.projectCfg, s.registry, s.cfg.Source); rErr != nil { + if rErr := config.ReconcileProjectSkills(s.projectRoot, s.projectCfg, s.skillsStore, s.cfg.Source); rErr != nil { log.Printf("warning: failed to reconcile project skills config: %v", rErr) } } else { - if rErr := config.ReconcileGlobalSkills(s.cfg, s.registry); rErr != nil { + if rErr := config.ReconcileGlobalSkills(s.cfg, s.skillsStore); rErr != nil { log.Printf("warning: failed to reconcile global skills config: %v", rErr) } } @@ -439,11 +439,11 @@ func (s *Server) handleInstall(w http.ResponseWriter, r *http.Request) { // Reconcile config after single install if s.IsProjectMode() { - if rErr := config.ReconcileProjectSkills(s.projectRoot, s.projectCfg, s.registry, s.cfg.Source); rErr != nil { + if rErr := config.ReconcileProjectSkills(s.projectRoot, s.projectCfg, s.skillsStore, s.cfg.Source); rErr != nil { log.Printf("warning: failed to reconcile project skills config: %v", rErr) } } else { - if rErr := config.ReconcileGlobalSkills(s.cfg, s.registry); rErr != nil { + if rErr := config.ReconcileGlobalSkills(s.cfg, s.skillsStore); rErr != nil { log.Printf("warning: failed to reconcile global skills config: %v", rErr) } } diff --git a/internal/server/handler_skills.go b/internal/server/handler_skills.go index b29c441d..1be625b7 100644 --- a/internal/server/handler_skills.go +++ b/internal/server/handler_skills.go @@ -10,7 +10,6 @@ import ( "strings" "time" - "skillshare/internal/config" "skillshare/internal/git" "skillshare/internal/install" "skillshare/internal/resource" @@ -349,7 +348,7 @@ func (s *Server) handleUninstallRepo(w http.ResponseWriter, r *http.Request) { return } - // Prune registry entries: the repo itself + skills belonging to it. + // Prune store entries: the repo itself + skills belonging to it. // Legacy entries use Group without "_" prefix (e.g., "team-skills" for repo "_team-skills"). // Only apply legacy matching for top-level repos (no "/" in repoName) to avoid // basename collisions between sibling nested repos like org/_team-skills vs dept/_team-skills. @@ -357,35 +356,35 @@ func (s *Server) handleUninstallRepo(w http.ResponseWriter, r *http.Request) { if !strings.Contains(repoName, "/") { legacyGroup = strings.TrimPrefix(repoName, "_") } - filtered := make([]config.SkillEntry, 0, len(s.registry.Skills)) - for _, entry := range s.registry.Skills { - fullName := entry.FullName() + for _, name := range s.skillsStore.List() { + entry := s.skillsStore.Get(name) + if entry == nil { + continue + } // Match the repo's own entry (e.g., "_team-skills" or "org/_team-skills") - if fullName == repoName { + if name == repoName { + s.skillsStore.Remove(name) continue } // Match tracked skills grouped under this repo (exact group match) if entry.Tracked && entry.Group == repoName { + s.skillsStore.Remove(name) continue } // Match legacy grouped entries (top-level repos only, e.g., group="team-skills") if legacyGroup != "" && entry.Tracked && entry.Group == legacyGroup { + s.skillsStore.Remove(name) continue } // Match nested members (e.g., "org/_team-skills/sub-skill") - if strings.HasPrefix(fullName, repoName+"/") { + if strings.HasPrefix(name, repoName+"/") { + s.skillsStore.Remove(name) continue } - filtered = append(filtered, entry) } - s.registry.Skills = filtered - regDir := s.cfg.RegistryDir - if s.IsProjectMode() { - regDir = filepath.Join(s.projectRoot, ".skillshare") - } - if err := s.registry.Save(regDir); err != nil { - log.Printf("warning: failed to save registry after repo uninstall: %v", err) + if err := s.skillsStore.Save(s.cfg.Source); err != nil { + log.Printf("warning: failed to save metadata after repo uninstall: %v", err) } s.writeOpsLog("uninstall", "ok", start, map[string]any{ diff --git a/internal/server/handler_skills_test.go b/internal/server/handler_skills_test.go index f87f2286..e455b602 100644 --- a/internal/server/handler_skills_test.go +++ b/internal/server/handler_skills_test.go @@ -9,7 +9,7 @@ import ( "strings" "testing" - "skillshare/internal/config" + "skillshare/internal/install" "skillshare/internal/trash" ) @@ -178,14 +178,11 @@ func TestHandleUninstallRepo_PrunesRegistry(t *testing.T) { addTrackedRepo(t, src, "_team-skills") addSkill(t, src, "unrelated-skill") // must exist on disk to survive reconcile - // Seed registry with entries belonging to this repo - s.registry = &config.Registry{ - Skills: []config.SkillEntry{ - {Name: "vue-best-practices", Group: "team-skills", Tracked: true}, - {Name: "react-patterns", Group: "team-skills", Tracked: true}, - {Name: "unrelated-skill", Group: ""}, - }, - } + // Seed store with entries belonging to this repo + s.skillsStore = install.NewMetadataStore() + s.skillsStore.Set("team-skills/vue-best-practices", &install.MetadataEntry{Group: "team-skills", Tracked: true}) + s.skillsStore.Set("team-skills/react-patterns", &install.MetadataEntry{Group: "team-skills", Tracked: true}) + s.skillsStore.Set("unrelated-skill", &install.MetadataEntry{}) req := httptest.NewRequest(http.MethodDelete, "/api/repos/_team-skills", nil) req.SetPathValue("name", "_team-skills") @@ -196,10 +193,11 @@ func TestHandleUninstallRepo_PrunesRegistry(t *testing.T) { t.Fatalf("expected 200, got %d: %s", rr.Code, rr.Body.String()) } - // Registry should not contain any team-skills entries - for _, entry := range s.registry.Skills { - if entry.Group == "team-skills" { - t.Fatalf("expected team-skills entries to be pruned, but found %q", entry.Name) + // Store should not contain any team-skills entries + for _, name := range s.skillsStore.List() { + entry := s.skillsStore.Get(name) + if entry != nil && entry.Group == "team-skills" { + t.Fatalf("expected team-skills entries to be pruned, but found %q", name) } } } @@ -209,14 +207,11 @@ func TestHandleUninstallRepo_NestedPruneDoesNotAffectSibling(t *testing.T) { addTrackedRepo(t, src, filepath.Join("org", "_team-skills")) addTrackedRepo(t, src, filepath.Join("dept", "_team-skills")) - // Seed registry: entries from both nested repos + an exact-group entry - s.registry = &config.Registry{ - Skills: []config.SkillEntry{ - {Name: "vue", Group: "org/_team-skills", Tracked: true}, - {Name: "react", Group: "dept/_team-skills", Tracked: true}, - {Name: "unrelated", Group: ""}, - }, - } + // Seed store: entries from both nested repos + an exact-group entry + s.skillsStore = install.NewMetadataStore() + s.skillsStore.Set("org/_team-skills/vue", &install.MetadataEntry{Group: "org/_team-skills", Tracked: true}) + s.skillsStore.Set("dept/_team-skills/react", &install.MetadataEntry{Group: "dept/_team-skills", Tracked: true}) + s.skillsStore.Set("unrelated", &install.MetadataEntry{}) req := httptest.NewRequest(http.MethodDelete, "/api/repos/org/_team-skills", nil) req.SetPathValue("name", "org/_team-skills") @@ -228,16 +223,18 @@ func TestHandleUninstallRepo_NestedPruneDoesNotAffectSibling(t *testing.T) { } // org/_team-skills entries should be pruned - for _, entry := range s.registry.Skills { - if entry.Group == "org/_team-skills" { - t.Fatalf("expected org/_team-skills entries to be pruned, but found %q", entry.Name) + for _, name := range s.skillsStore.List() { + entry := s.skillsStore.Get(name) + if entry != nil && entry.Group == "org/_team-skills" { + t.Fatalf("expected org/_team-skills entries to be pruned, but found %q", name) } } // dept/_team-skills entries must survive var found bool - for _, entry := range s.registry.Skills { - if entry.Group == "dept/_team-skills" { + for _, name := range s.skillsStore.List() { + entry := s.skillsStore.Get(name) + if entry != nil && entry.Group == "dept/_team-skills" { found = true break } @@ -266,11 +263,8 @@ func TestHandleUninstallRepo_ProjectMode_GitignorePath(t *testing.T) { gitignorePath := filepath.Join(gitignoreDir, ".gitignore") os.WriteFile(gitignorePath, []byte("# BEGIN SKILLSHARE MANAGED - DO NOT EDIT\nskills/_team-skills/\n# END SKILLSHARE MANAGED\n"), 0644) - s.registry = &config.Registry{ - Skills: []config.SkillEntry{ - {Name: "_team-skills", Tracked: true}, - }, - } + s.skillsStore = install.NewMetadataStore() + s.skillsStore.Set("_team-skills", &install.MetadataEntry{Tracked: true}) req := httptest.NewRequest(http.MethodDelete, "/api/repos/_team-skills", nil) req.SetPathValue("name", "_team-skills") @@ -376,14 +370,11 @@ func TestHandleUninstallRepo_PrunesNestedFullPathGroup(t *testing.T) { s, src := newTestServer(t) addTrackedRepo(t, src, filepath.Join("org", "_team-skills")) - // Registry with entries using full nested path as Group (new reconcile format) - s.registry = &config.Registry{ - Skills: []config.SkillEntry{ - {Name: "vue", Group: "org/_team-skills", Tracked: true}, - {Name: "react", Group: "org/_team-skills", Tracked: true}, - {Name: "unrelated", Group: ""}, - }, - } + // Store with entries using full nested path as Group (new reconcile format) + s.skillsStore = install.NewMetadataStore() + s.skillsStore.Set("org/_team-skills/vue", &install.MetadataEntry{Group: "org/_team-skills", Tracked: true}) + s.skillsStore.Set("org/_team-skills/react", &install.MetadataEntry{Group: "org/_team-skills", Tracked: true}) + s.skillsStore.Set("unrelated", &install.MetadataEntry{}) req := httptest.NewRequest(http.MethodDelete, "/api/repos/org/_team-skills", nil) req.SetPathValue("name", "org/_team-skills") @@ -395,11 +386,12 @@ func TestHandleUninstallRepo_PrunesNestedFullPathGroup(t *testing.T) { } // All org/_team-skills entries should be pruned, unrelated survives - if len(s.registry.Skills) != 1 { - t.Fatalf("expected 1 surviving entry, got %d", len(s.registry.Skills)) + names := s.skillsStore.List() + if len(names) != 1 { + t.Fatalf("expected 1 surviving entry, got %d", len(names)) } - if s.registry.Skills[0].Name != "unrelated" { - t.Fatalf("expected 'unrelated' to survive, got %q", s.registry.Skills[0].Name) + if !s.skillsStore.Has("unrelated") { + t.Fatalf("expected 'unrelated' to survive") } } @@ -407,14 +399,11 @@ func TestHandleUninstallRepo_PrunesNestedMembersByPrefix(t *testing.T) { s, src := newTestServer(t) addTrackedRepo(t, src, filepath.Join("org", "_team-skills")) - // Registry with the repo's own entry + a sub-skill using FullName prefix - s.registry = &config.Registry{ - Skills: []config.SkillEntry{ - {Name: "org/_team-skills", Group: "", Tracked: true}, // repo entry - {Name: "sub-skill", Group: "org/_team-skills", Tracked: true}, // member - {Name: "standalone", Group: ""}, - }, - } + // Store with the repo's own entry + a sub-skill using name prefix + s.skillsStore = install.NewMetadataStore() + s.skillsStore.Set("org/_team-skills", &install.MetadataEntry{Tracked: true}) // repo entry + s.skillsStore.Set("org/_team-skills/sub-skill", &install.MetadataEntry{Group: "org/_team-skills", Tracked: true}) // member + s.skillsStore.Set("standalone", &install.MetadataEntry{}) req := httptest.NewRequest(http.MethodDelete, "/api/repos/org/_team-skills", nil) req.SetPathValue("name", "org/_team-skills") @@ -425,10 +414,11 @@ func TestHandleUninstallRepo_PrunesNestedMembersByPrefix(t *testing.T) { t.Fatalf("expected 200, got %d: %s", rr.Code, rr.Body.String()) } - if len(s.registry.Skills) != 1 { - t.Fatalf("expected 1 surviving entry, got %d", len(s.registry.Skills)) + names := s.skillsStore.List() + if len(names) != 1 { + t.Fatalf("expected 1 surviving entry, got %d", len(names)) } - if s.registry.Skills[0].Name != "standalone" { - t.Fatalf("expected 'standalone' to survive, got %q", s.registry.Skills[0].Name) + if !s.skillsStore.Has("standalone") { + t.Fatalf("expected 'standalone' to survive") } } diff --git a/internal/server/handler_sync_test.go b/internal/server/handler_sync_test.go index 582d758d..237b82c1 100644 --- a/internal/server/handler_sync_test.go +++ b/internal/server/handler_sync_test.go @@ -9,7 +9,7 @@ import ( "strings" "testing" - "skillshare/internal/config" + "skillshare/internal/install" ) func TestHandleSync_MergeMode(t *testing.T) { @@ -53,16 +53,13 @@ func TestHandleSync_IgnoredSkillNotPrunedFromRegistry(t *testing.T) { // Add .skillignore to exclude the second skill os.WriteFile(filepath.Join(src, ".skillignore"), []byte("ignored-skill\n"), 0644) - // Pre-populate registry with both entries and persist to disk - // (server auto-reloads registry from disk on each request) - s.registry = &config.Registry{ - Skills: []config.SkillEntry{ - {Name: "kept-skill", Source: "github.com/user/kept"}, - {Name: "ignored-skill", Source: "github.com/user/ignored"}, - }, - } - if err := s.registry.Save(s.cfg.RegistryDir); err != nil { - t.Fatalf("failed to save registry: %v", err) + // Pre-populate store with both entries and persist to disk + // (server auto-reloads metadata from disk on each request) + s.skillsStore = install.NewMetadataStore() + s.skillsStore.Set("kept-skill", &install.MetadataEntry{Source: "github.com/user/kept"}) + s.skillsStore.Set("ignored-skill", &install.MetadataEntry{Source: "github.com/user/ignored"}) + if err := s.skillsStore.Save(src); err != nil { + t.Fatalf("failed to save metadata: %v", err) } // Run sync (non-dry-run) @@ -76,12 +73,9 @@ func TestHandleSync_IgnoredSkillNotPrunedFromRegistry(t *testing.T) { } // Both entries should survive — ignored skill still exists on disk - if len(s.registry.Skills) != 2 { - names := make([]string, len(s.registry.Skills)) - for i, sk := range s.registry.Skills { - names[i] = sk.Name - } - t.Fatalf("expected 2 registry entries after sync, got %d: %v", len(s.registry.Skills), names) + names := s.skillsStore.List() + if len(names) != 2 { + t.Fatalf("expected 2 metadata entries after sync, got %d: %v", len(names), names) } } diff --git a/internal/server/handler_uninstall.go b/internal/server/handler_uninstall.go index ef0c10ce..2b00d5ba 100644 --- a/internal/server/handler_uninstall.go +++ b/internal/server/handler_uninstall.go @@ -178,47 +178,45 @@ func (s *Server) handleBatchUninstall(w http.ResponseWriter, r *http.Request) { if succeeded > 0 { // removedPaths contains exact RelPaths (e.g. "frontend/vue/vue-best-practices") // and repo dir names (e.g. "_team-skills"), collected during the uninstall loop. - filtered := make([]config.SkillEntry, 0, len(s.registry.Skills)) - for _, entry := range s.registry.Skills { - fullName := entry.FullName() - if removedPaths[fullName] || removedPaths[entry.Name] { + for _, name := range s.skillsStore.List() { + entry := s.skillsStore.Get(name) + if entry == nil { continue } - // Tracked repos: registry stores group without "_" prefix (e.g., group="team-skills" + if removedPaths[name] { + s.skillsStore.Remove(name) + continue + } + // Tracked repos: store uses group without "_" prefix (e.g., group="team-skills" // for repo dir "_team-skills"). Reconstruct the prefixed name to match removedPaths. if entry.Group != "" && removedPaths["_"+entry.Group] { + s.skillsStore.Remove(name) continue } // When a group directory is uninstalled, also remove its member skills memberOfRemoved := false for rp := range removedPaths { - if strings.HasPrefix(fullName, rp+"/") { + if strings.HasPrefix(name, rp+"/") { memberOfRemoved = true break } } if memberOfRemoved { - continue + s.skillsStore.Remove(name) } - filtered = append(filtered, entry) } - s.registry.Skills = filtered - regDir := s.cfg.RegistryDir - if s.IsProjectMode() { - regDir = filepath.Join(s.projectRoot, ".skillshare") - } - if err := s.registry.Save(regDir); err != nil { - log.Printf("warning: failed to save registry: %v", err) + if err := s.skillsStore.Save(s.cfg.Source); err != nil { + log.Printf("warning: failed to save metadata: %v", err) } if s.IsProjectMode() { if rErr := config.ReconcileProjectSkills( - s.projectRoot, s.projectCfg, s.registry, s.cfg.Source); rErr != nil { + s.projectRoot, s.projectCfg, s.skillsStore, s.cfg.Source); rErr != nil { log.Printf("warning: failed to reconcile project skills config: %v", rErr) } } else { - if rErr := config.ReconcileGlobalSkills(s.cfg, s.registry); rErr != nil { + if rErr := config.ReconcileGlobalSkills(s.cfg, s.skillsStore); rErr != nil { log.Printf("warning: failed to reconcile global skills config: %v", rErr) } } diff --git a/internal/server/handler_uninstall_test.go b/internal/server/handler_uninstall_test.go index 199a8aa1..e72ab11d 100644 --- a/internal/server/handler_uninstall_test.go +++ b/internal/server/handler_uninstall_test.go @@ -10,7 +10,7 @@ import ( "strings" "testing" - "skillshare/internal/config" + "skillshare/internal/install" ) func TestHandleBatchUninstall_ProjectMode_GitignorePath(t *testing.T) { @@ -33,11 +33,8 @@ func TestHandleBatchUninstall_ProjectMode_GitignorePath(t *testing.T) { "# BEGIN SKILLSHARE MANAGED - DO NOT EDIT\nskills/_team-skills/\n# END SKILLSHARE MANAGED\n", ), 0644) - s.registry = &config.Registry{ - Skills: []config.SkillEntry{ - {Name: "_team-skills", Tracked: true}, - }, - } + s.skillsStore = install.NewMetadataStore() + s.skillsStore.Set("_team-skills", &install.MetadataEntry{Tracked: true}) body := batchUninstallRequest{Names: []string{"_team-skills"}, Force: true} b, _ := json.Marshal(body) @@ -70,11 +67,8 @@ func TestHandleBatchUninstall_GlobalMode_GitignorePath(t *testing.T) { "# BEGIN SKILLSHARE MANAGED - DO NOT EDIT\n_team-skills/\n# END SKILLSHARE MANAGED\n", ), 0644) - s.registry = &config.Registry{ - Skills: []config.SkillEntry{ - {Name: "_team-skills", Tracked: true}, - }, - } + s.skillsStore = install.NewMetadataStore() + s.skillsStore.Set("_team-skills", &install.MetadataEntry{Tracked: true}) body := batchUninstallRequest{Names: []string{"_team-skills"}, Force: true} b, _ := json.Marshal(body) diff --git a/internal/server/server.go b/internal/server/server.go index f46dfed4..0d61b742 100644 --- a/internal/server/server.go +++ b/internal/server/server.go @@ -20,12 +20,13 @@ import ( // Server holds the HTTP server state type Server struct { - cfg *config.Config - registry *config.Registry - addr string - mux *http.ServeMux - handler http.Handler - mu sync.RWMutex // protects config: Lock for writes/reloads, RLock for reads + cfg *config.Config + skillsStore *install.MetadataStore + agentsStore *install.MetadataStore + addr string + mux *http.ServeMux + handler http.Handler + mu sync.RWMutex // protects config: Lock for writes/reloads, RLock for reads startTime time.Time // for uptime reporting in health check @@ -83,17 +84,22 @@ func (s *Server) wrapBasePath() { // New creates a new Server for global mode. // uiDistDir, when non-empty, serves UI from disk instead of the embedded SPA. func New(cfg *config.Config, addr, basePath, uiDistDir string) *Server { - reg, _ := config.LoadRegistry(cfg.RegistryDir) - if reg == nil { - reg = &config.Registry{} + skillsStore, _ := install.LoadMetadataWithMigration(cfg.Source, "") + if skillsStore == nil { + skillsStore = install.NewMetadataStore() + } + agentsStore, _ := install.LoadMetadataWithMigration(cfg.EffectiveAgentsSource(), "agent") + if agentsStore == nil { + agentsStore = install.NewMetadataStore() } s := &Server{ - cfg: cfg, - registry: reg, - addr: addr, - mux: http.NewServeMux(), - basePath: NormalizeBasePath(basePath), - uiDistDir: uiDistDir, + cfg: cfg, + skillsStore: skillsStore, + agentsStore: agentsStore, + addr: addr, + mux: http.NewServeMux(), + basePath: NormalizeBasePath(basePath), + uiDistDir: uiDistDir, } s.registerRoutes() s.handler = s.withConfigAutoReload(s.mux) @@ -104,13 +110,20 @@ func New(cfg *config.Config, addr, basePath, uiDistDir string) *Server { // NewProject creates a new Server for project mode. // uiDistDir, when non-empty, serves UI from disk instead of the embedded SPA. func NewProject(cfg *config.Config, projectCfg *config.ProjectConfig, projectRoot, addr, basePath, uiDistDir string) *Server { - reg, _ := config.LoadRegistry(filepath.Join(projectRoot, ".skillshare")) - if reg == nil { - reg = &config.Registry{} + skillsDir := filepath.Join(projectRoot, ".skillshare", "skills") + agentsDir := filepath.Join(projectRoot, ".skillshare", "agents") + skillsStore, _ := install.LoadMetadataWithMigration(skillsDir, "") + if skillsStore == nil { + skillsStore = install.NewMetadataStore() + } + agentsStore, _ := install.LoadMetadataWithMigration(agentsDir, "agent") + if agentsStore == nil { + agentsStore = install.NewMetadataStore() } s := &Server{ cfg: cfg, - registry: reg, + skillsStore: skillsStore, + agentsStore: agentsStore, addr: addr, mux: http.NewServeMux(), basePath: NormalizeBasePath(basePath), @@ -208,8 +221,13 @@ func (s *Server) reloadConfig() error { return err } s.cfg.Targets = targets - if reg, err := config.LoadRegistry(filepath.Join(s.projectRoot, ".skillshare")); err == nil { - s.registry = reg + skillsDir := filepath.Join(s.projectRoot, ".skillshare", "skills") + agentsDir := filepath.Join(s.projectRoot, ".skillshare", "agents") + if st, err := install.LoadMetadata(skillsDir); err == nil { + s.skillsStore = st + } + if st, err := install.LoadMetadata(agentsDir); err == nil { + s.agentsStore = st } return nil } @@ -218,8 +236,11 @@ func (s *Server) reloadConfig() error { return err } s.cfg = newCfg - if reg, err := config.LoadRegistry(s.cfg.RegistryDir); err == nil { - s.registry = reg + if st, err := install.LoadMetadata(newCfg.Source); err == nil { + s.skillsStore = st + } + if st, err := install.LoadMetadata(newCfg.EffectiveAgentsSource()); err == nil { + s.agentsStore = st } return nil } From ad05ed9a2f0790de95442eee5ab15e03518d3828 Mon Sep 17 00:00:00 2001 From: Willie Date: Wed, 8 Apr 2026 11:04:28 +0800 Subject: [PATCH 079/205] refactor: replace ReadMeta in update handlers with MetadataStore update.go, update_handlers.go, update_project.go, update_resolve.go now use MetadataStore.Get() instead of per-skill ReadMeta. --- cmd/skillshare/update.go | 10 +++++----- cmd/skillshare/update_handlers.go | 24 ++++++++++++++++-------- cmd/skillshare/update_project.go | 18 ++++++++++-------- cmd/skillshare/update_resolve.go | 18 ++++++++++-------- 4 files changed, 41 insertions(+), 29 deletions(-) diff --git a/cmd/skillshare/update.go b/cmd/skillshare/update.go index b25d15de..9b6fdffe 100644 --- a/cmd/skillshare/update.go +++ b/cmd/skillshare/update.go @@ -215,6 +215,7 @@ func cmdUpdate(args []string) error { // Recursive discovery for --all scanSpinner := ui.StartSpinner("Scanning skills...") walkRoot := utils.ResolveSymlink(cfg.Source) + metaStore, _ := install.LoadMetadataWithMigration(cfg.Source, "") err := filepath.Walk(walkRoot, func(path string, info os.FileInfo, err error) error { if err != nil || path == walkRoot { return nil @@ -241,12 +242,11 @@ func cmdUpdate(args []string) error { // Regular skill if !info.IsDir() && info.Name() == "SKILL.md" { skillDir := filepath.Dir(path) - meta, metaErr := install.ReadMeta(skillDir) - if metaErr == nil && meta != nil && meta.Source != "" { - rel, _ := filepath.Rel(walkRoot, skillDir) - if rel != "." && !seen[rel] { + rel, _ := filepath.Rel(walkRoot, skillDir) + if rel != "." && !seen[rel] { + if entry := metaStore.Get(rel); entry != nil && entry.Source != "" { seen[rel] = true - targets = append(targets, updateTarget{name: rel, path: skillDir, isRepo: false, meta: meta}) + targets = append(targets, updateTarget{name: rel, path: skillDir, isRepo: false, meta: entry}) } } } diff --git a/cmd/skillshare/update_handlers.go b/cmd/skillshare/update_handlers.go index f659b34c..19dea97b 100644 --- a/cmd/skillshare/update_handlers.go +++ b/cmd/skillshare/update_handlers.go @@ -216,10 +216,11 @@ func updateRegularSkill(uc *updateContext, skillName string) (updateResult, erro skillPath := filepath.Join(uc.sourcePath, skillName) // Read metadata to get source - meta, err := install.ReadMeta(skillPath) - if err != nil { - return updateResult{skipped: 1}, fmt.Errorf("cannot read metadata for '%s': %w", skillName, err) + store, storeErr := install.LoadMetadataWithMigration(uc.sourcePath, "") + if storeErr != nil { + return updateResult{skipped: 1}, fmt.Errorf("cannot read metadata for '%s': %w", skillName, storeErr) } + meta := store.Get(skillName) if meta == nil || meta.Source == "" { return updateResult{skipped: 1}, fmt.Errorf("skill '%s' has no source metadata, cannot update", skillName) } @@ -348,9 +349,9 @@ func updateTrackedRepoQuick(uc *updateContext, repoPath string) (bool, *audit.Re // updateSkillFromMeta updates a skill using its metadata in batch mode. // Output is suppressed; caller handles display via progress bar. -// If cachedMeta is non-nil it is used directly; otherwise metadata is read from disk. +// If cachedMeta is non-nil it is used directly; otherwise metadata is loaded from the store. // Returns (updated, installResult, error). -func updateSkillFromMeta(uc *updateContext, skillPath string, cachedMeta *install.SkillMeta) (bool, *install.InstallResult, error) { +func updateSkillFromMeta(uc *updateContext, skillPath string, cachedMeta *install.MetadataEntry) (bool, *install.InstallResult, error) { if uc.opts.dryRun { return false, nil, nil } @@ -361,9 +362,16 @@ func updateSkillFromMeta(uc *updateContext, skillPath string, cachedMeta *instal meta := cachedMeta if meta == nil { - var readErr error - meta, readErr = install.ReadMeta(skillPath) - if readErr != nil || meta == nil || meta.Source == "" { + store, _ := install.LoadMetadataWithMigration(uc.sourcePath, "") + skillName := filepath.Base(skillPath) + // Try base name first, then relative path from source + meta = store.Get(skillName) + if meta == nil { + if rel, relErr := filepath.Rel(uc.sourcePath, skillPath); relErr == nil { + meta = store.Get(rel) + } + } + if meta == nil || meta.Source == "" { return false, nil, nil } } diff --git a/cmd/skillshare/update_project.go b/cmd/skillshare/update_project.go index c342a887..2a47c9d3 100644 --- a/cmd/skillshare/update_project.go +++ b/cmd/skillshare/update_project.go @@ -59,6 +59,8 @@ func cmdUpdateProjectBatch(sourcePath string, opts *updateOptions, projectRoot s seen := map[string]bool{} var resolveWarnings []string + metaStore, _ := install.LoadMetadataWithMigration(sourcePath, "") + for _, name := range opts.names { // Check group directory first (before repo/skill lookup, // so "feature-radar" expands to all skills rather than @@ -104,11 +106,11 @@ func cmdUpdateProjectBatch(sourcePath string, opts *updateOptions, projectRoot s // Regular skill with metadata skillPath := filepath.Join(sourcePath, name) if info, err := os.Stat(skillPath); err == nil && info.IsDir() { - meta, metaErr := install.ReadMeta(skillPath) - if metaErr == nil && meta != nil && meta.Source != "" { + entry := metaStore.Get(name) + if entry != nil && entry.Source != "" { if !seen[skillPath] { seen[skillPath] = true - targets = append(targets, updateTarget{name: name, path: skillPath, isRepo: false, meta: meta}) + targets = append(targets, updateTarget{name: name, path: skillPath, isRepo: false, meta: entry}) } continue } @@ -187,6 +189,7 @@ func updateAllProjectSkills(uc *updateContext) (*updateResult, error) { scanSpinner := ui.StartSpinner("Scanning skills...") walkRoot := utils.ResolveSymlink(uc.sourcePath) + metaStore, _ := install.LoadMetadataWithMigration(uc.sourcePath, "") err := filepath.Walk(walkRoot, func(path string, info os.FileInfo, err error) error { if err != nil { return nil @@ -213,11 +216,10 @@ func updateAllProjectSkills(uc *updateContext) (*updateResult, error) { // Regular skill with metadata if !info.IsDir() && info.Name() == "SKILL.md" { skillDir := filepath.Dir(path) - meta, metaErr := install.ReadMeta(skillDir) - if metaErr == nil && meta != nil && meta.Source != "" { - rel, _ := filepath.Rel(walkRoot, skillDir) - if rel != "." { - targets = append(targets, updateTarget{name: rel, path: skillDir, isRepo: false, meta: meta}) + rel, _ := filepath.Rel(walkRoot, skillDir) + if rel != "." { + if entry := metaStore.Get(rel); entry != nil && entry.Source != "" { + targets = append(targets, updateTarget{name: rel, path: skillDir, isRepo: false, meta: entry}) } } } diff --git a/cmd/skillshare/update_resolve.go b/cmd/skillshare/update_resolve.go index ccc1e804..ad331bee 100644 --- a/cmd/skillshare/update_resolve.go +++ b/cmd/skillshare/update_resolve.go @@ -12,10 +12,10 @@ import ( ) type updateTarget struct { - name string // relative path from source dir (display name) - path string // absolute path on disk - isRepo bool // true for tracked repos (_-prefixed git repos) - meta *install.SkillMeta // cached metadata; nil for tracked repos + name string // relative path from source dir (display name) + path string // absolute path on disk + isRepo bool // true for tracked repos (_-prefixed git repos) + meta *install.MetadataEntry // cached metadata; nil for tracked repos } // resolveByBasename searches nested skills and tracked repos by their @@ -122,9 +122,10 @@ func resolveGroupUpdatable(group, sourceDir string) ([]updateTarget, error) { return filepath.SkipDir } - // Skill with metadata (has .skillshare-meta.json) - if meta, metaErr := install.ReadMeta(path); metaErr == nil && meta != nil && meta.Source != "" { - matches = append(matches, updateTarget{name: rel, path: path, isRepo: false, meta: meta}) + // Skill with metadata (centralized store) + store, _ := install.LoadMetadata(resolvedSourceDir) + if entry := store.Get(rel); entry != nil && entry.Source != "" { + matches = append(matches, updateTarget{name: rel, path: path, isRepo: false, meta: entry}) return filepath.SkipDir } @@ -150,7 +151,8 @@ func isGroupDir(name, sourceDir string) bool { return false } // Not a skill with metadata - if meta, metaErr := install.ReadMeta(path); metaErr == nil && meta != nil && meta.Source != "" { + store, _ := install.LoadMetadata(sourceDir) + if entry := store.Get(name); entry != nil && entry.Source != "" { return false } // Not a skill directory (has SKILL.md) From e62f729f1804b9c6e7eb8fc80fbd456f948c1768 Mon Sep 17 00:00:00 2001 From: Willie Date: Wed, 8 Apr 2026 11:08:24 +0800 Subject: [PATCH 080/205] refactor: partial uninstall migration to MetadataStore --- cmd/skillshare/uninstall.go | 18 +++++++++++++----- cmd/skillshare/uninstall_project.go | 8 +++++++- 2 files changed, 20 insertions(+), 6 deletions(-) diff --git a/cmd/skillshare/uninstall.go b/cmd/skillshare/uninstall.go index 506ff1b3..3123372b 100644 --- a/cmd/skillshare/uninstall.go +++ b/cmd/skillshare/uninstall.go @@ -397,7 +397,7 @@ func (s uninstallTypeSummary) details() string { } // displayUninstallInfo shows information about the skill to be uninstalled -func displayUninstallInfo(target *uninstallTarget) { +func displayUninstallInfo(target *uninstallTarget, store *install.MetadataStore) { if target.isTrackedRepo { ui.Header("Uninstalling tracked repository") ui.Info("Type: tracked repository") @@ -412,9 +412,11 @@ func displayUninstallInfo(target *uninstallTarget) { } else { ui.Header("Uninstalling skill") } - if meta, err := install.ReadMeta(target.path); err == nil && meta != nil { - ui.Info("Source: %s", meta.Source) - ui.Info("Installed: %s", meta.InstalledAt.Format("2006-01-02 15:04")) + if entry := store.Get(target.name); entry != nil { + ui.Info("Source: %s", entry.Source) + if !entry.InstalledAt.IsZero() { + ui.Info("Installed: %s", entry.InstalledAt.Format("2006-01-02 15:04")) + } } } ui.Info("Name: %s", target.name) @@ -598,6 +600,12 @@ func cmdUninstall(args []string) error { return err } + // Load centralized metadata store for display/reinstall hints. + skillsStore, _ := install.LoadMetadataWithMigration(cfg.Source, "") + if skillsStore == nil { + skillsStore = install.NewMetadataStore() + } + // --- Phase 1: RESOLVE --- var targets []*uninstallTarget seen := map[string]bool{} // dedup by path @@ -732,7 +740,7 @@ func cmdUninstall(args []string) error { if opts.jsonOutput { // Skip display in JSON mode } else if single { - displayUninstallInfo(targets[0]) + displayUninstallInfo(targets[0], skillsStore) } else { ui.Header(fmt.Sprintf("Uninstalling %d %s", len(targets), summary.noun())) if len(targets) > 20 { diff --git a/cmd/skillshare/uninstall_project.go b/cmd/skillshare/uninstall_project.go index b7bbfe00..6cb86315 100644 --- a/cmd/skillshare/uninstall_project.go +++ b/cmd/skillshare/uninstall_project.go @@ -89,6 +89,12 @@ func cmdUninstallProject(args []string, root string) error { sourceDir := filepath.Join(root, ".skillshare", "skills") trashDir := trash.ProjectTrashDir(root) + // Load centralized metadata store for display/reinstall hints. + skillsStore, _ := install.LoadMetadataWithMigration(sourceDir, "") + if skillsStore == nil { + skillsStore = install.NewMetadataStore() + } + // Backward compat: ensure operational dirs are gitignored for projects created before v0.17.3. _ = ensureProjectGitignore(root, false) @@ -172,7 +178,7 @@ func cmdUninstallProject(args []string, root string) error { single := len(targets) == 1 summary := summarizeUninstallTargets(targets) if single { - displayUninstallInfo(targets[0]) + displayUninstallInfo(targets[0], skillsStore) } else { ui.Header(fmt.Sprintf("Uninstalling %d %s", len(targets), summary.noun())) if len(targets) > 20 { From d0a1df3e8b7a6262f0bd4f972170d4da6c3e988a Mon Sep 17 00:00:00 2001 From: Willie Date: Wed, 8 Apr 2026 11:11:03 +0800 Subject: [PATCH 081/205] refactor: replace ReadMeta in server handler_skills and handler_update handler_skills.go uses s.skillsStore.Get() for skill metadata. handler_update.go uses store parameter in getServerUpdatableSkills. --- internal/server/handler_skills.go | 32 +++++++++++++----------- internal/server/handler_update.go | 21 ++++++++++------ internal/server/handler_update_stream.go | 2 +- 3 files changed, 33 insertions(+), 22 deletions(-) diff --git a/internal/server/handler_skills.go b/internal/server/handler_skills.go index 1be625b7..e5309e62 100644 --- a/internal/server/handler_skills.go +++ b/internal/server/handler_skills.go @@ -76,13 +76,15 @@ func (s *Server) handleListSkills(w http.ResponseWriter, r *http.Request) { Disabled: d.Disabled, } - if meta, _ := install.ReadMeta(d.SourcePath); meta != nil { - item.InstalledAt = meta.InstalledAt.Format(time.RFC3339) - item.Source = meta.Source - item.Type = meta.Type - item.RepoURL = meta.RepoURL - item.Version = meta.Version - item.Branch = meta.Branch + if entry := s.skillsStore.Get(filepath.Base(d.SourcePath)); entry != nil { + if !entry.InstalledAt.IsZero() { + item.InstalledAt = entry.InstalledAt.Format(time.RFC3339) + } + item.Source = entry.Source + item.Type = entry.Type + item.RepoURL = entry.RepoURL + item.Version = entry.Version + item.Branch = entry.Branch } enrichSkillBranch(&item) @@ -155,13 +157,15 @@ func (s *Server) handleGetSkill(w http.ResponseWriter, r *http.Request) { Disabled: d.Disabled, } - if meta, _ := install.ReadMeta(d.SourcePath); meta != nil { - item.InstalledAt = meta.InstalledAt.Format("2006-01-02T15:04:05Z") - item.Source = meta.Source - item.Type = meta.Type - item.RepoURL = meta.RepoURL - item.Version = meta.Version - item.Branch = meta.Branch + if entry := s.skillsStore.Get(filepath.Base(d.SourcePath)); entry != nil { + if !entry.InstalledAt.IsZero() { + item.InstalledAt = entry.InstalledAt.Format("2006-01-02T15:04:05Z") + } + item.Source = entry.Source + item.Type = entry.Type + item.RepoURL = entry.RepoURL + item.Version = entry.Version + item.Branch = entry.Branch } enrichSkillBranch(&item) diff --git a/internal/server/handler_update.go b/internal/server/handler_update.go index 2743b39f..466d2ea4 100644 --- a/internal/server/handler_update.go +++ b/internal/server/handler_update.go @@ -126,7 +126,7 @@ func (s *Server) handleUpdate(w http.ResponseWriter, r *http.Request) { func (s *Server) updateSingle(name string, force, skipAudit bool) updateResultItem { // Try exact skill path first (prevents basename collision with nested repos) skillPath := filepath.Join(s.cfg.Source, name) - if meta, _ := install.ReadMeta(skillPath); meta != nil && meta.Source != "" { + if entry := s.skillsStore.Get(name); entry != nil && entry.Source != "" { return s.updateRegularSkill(name, skillPath, skipAudit) } @@ -262,8 +262,11 @@ func (s *Server) auditGateTrackedRepo(name, repoPath, beforeHash, threshold stri } func (s *Server) updateRegularSkill(name, skillPath string, skipAudit bool) updateResultItem { - meta, _ := install.ReadMeta(skillPath) - source, err := install.ParseSourceWithOptions(meta.Source, s.parseOpts()) + entry := s.skillsStore.Get(name) + if entry == nil { + return updateResultItem{Name: name, Action: "error", Message: "no metadata found"} + } + source, err := install.ParseSourceWithOptions(entry.Source, s.parseOpts()) if err != nil { return updateResultItem{ Name: name, @@ -315,7 +318,7 @@ func (s *Server) updateAll(force, skipAudit bool) []updateResultItem { } // Update regular skills with source metadata - skills, err := getServerUpdatableSkills(s.cfg.Source) + skills, err := getServerUpdatableSkills(s.cfg.Source, s.skillsStore) if err == nil { for _, skill := range skills { skillPath := filepath.Join(s.cfg.Source, skill) @@ -328,7 +331,7 @@ func (s *Server) updateAll(force, skipAudit bool) []updateResultItem { // getServerUpdatableSkills returns relative paths of skills that have metadata with a remote source. // It walks the source directory recursively to find nested skills (e.g. utils/ascii-box-check). -func getServerUpdatableSkills(sourceDir string) ([]string, error) { +func getServerUpdatableSkills(sourceDir string, store *install.MetadataStore) ([]string, error) { var skills []string walkRoot := utils.ResolveSymlink(sourceDir) err := filepath.WalkDir(walkRoot, func(path string, d os.DirEntry, err error) error { @@ -351,8 +354,12 @@ func getServerUpdatableSkills(sourceDir string) ([]string, error) { return filepath.SkipDir } // Check if this directory has updatable metadata - meta, metaErr := install.ReadMeta(path) - if metaErr != nil || meta == nil || meta.Source == "" { + relName := filepath.Base(path) + if relP, relErr2 := filepath.Rel(walkRoot, path); relErr2 == nil { + relName = filepath.ToSlash(relP) + } + entry := store.Get(relName) + if entry == nil || entry.Source == "" { return nil // continue walking into subdirectories } relPath, relErr := filepath.Rel(walkRoot, path) diff --git a/internal/server/handler_update_stream.go b/internal/server/handler_update_stream.go index d89d4980..19319415 100644 --- a/internal/server/handler_update_stream.go +++ b/internal/server/handler_update_stream.go @@ -85,7 +85,7 @@ func (s *Server) handleUpdateStream(w http.ResponseWriter, r *http.Request) { }) } } - skills, err := getServerUpdatableSkills(source) + skills, err := getServerUpdatableSkills(source, s.skillsStore) if err == nil { for _, skill := range skills { items = append(items, updateItem{ From ed1beb1ccb86faa500a15e48608ab5bf87215ab7 Mon Sep 17 00:00:00 2001 From: Willie Date: Wed, 8 Apr 2026 11:19:27 +0800 Subject: [PATCH 082/205] refactor: replace all remaining ReadMeta/LoadRegistry with MetadataStore Replace every remaining call to the old per-skill metadata API (ReadMeta, RefreshMetaHashes, config.LoadRegistry) with the centralized MetadataStore across 10 files: Server handlers: - handler_check.go: use s.skillsStore.Get() instead of ReadMeta - handler_check_stream.go: same pattern as handler_check.go - handler_skills_batch.go: use s.skillsStore.RefreshHashes() + Save() CLI commands: - check.go: load store once in collectCheckItems, use MetadataEntry - doctor.go: load store once in checkSkillIntegrity - list.go: load store once in buildSkillEntries (replaces parallel ReadMeta) - uninstall.go: use skillsStore for display/dry-run/finalize - uninstall_project.go: same pattern, remove config.LoadRegistry - update_batch.go: pruneRegistry now uses MetadataStore Hub: - hub/index.go: load store once in BuildIndex The only remaining ReadMeta call is in a test file (search_batch_meta_roundtrip_test.go) which tests the shim itself. --- cmd/skillshare/check.go | 38 +++++++++------ cmd/skillshare/doctor.go | 25 ++++++---- cmd/skillshare/list.go | 43 ++++++++--------- cmd/skillshare/uninstall.go | 63 ++++++++++--------------- cmd/skillshare/uninstall_project.go | 59 +++++++++-------------- cmd/skillshare/update_batch.go | 36 +++++--------- internal/hub/index.go | 24 ++++++---- internal/server/handler_check.go | 59 +++++++++++------------ internal/server/handler_check_stream.go | 53 ++++++++++----------- internal/server/handler_skills_batch.go | 25 +++++++--- 10 files changed, 206 insertions(+), 219 deletions(-) diff --git a/cmd/skillshare/check.go b/cmd/skillshare/check.go index 5d95e0aa..0e28437b 100644 --- a/cmd/skillshare/check.go +++ b/cmd/skillshare/check.go @@ -51,7 +51,7 @@ type checkOptions struct { type skillWithMeta struct { name string path string - meta *install.SkillMeta + meta *install.MetadataEntry } // collectCheckItems reads metadata and partitions items for parallel checking. @@ -69,31 +69,37 @@ func collectCheckItems(sourceDir string, repos []string, skills []string) ( }) } + // Load centralized metadata store once for all skills. + store, _ := install.LoadMetadata(sourceDir) + if store == nil { + store = install.NewMetadataStore() + } + urlGroups := make(map[string][]skillWithMeta) var localResults []checkSkillResult for _, skill := range skills { skillPath := filepath.Join(sourceDir, skill) - meta, err := install.ReadMeta(skillPath) + entry := store.Get(skill) - if err != nil || meta == nil || meta.RepoURL == "" { + if entry == nil || entry.RepoURL == "" { result := checkSkillResult{Name: skill, Status: "local"} - if meta != nil { - result.Source = meta.Source - result.Version = meta.Version - if !meta.InstalledAt.IsZero() { - result.InstalledAt = meta.InstalledAt.Format("2006-01-02") + if entry != nil { + result.Source = entry.Source + result.Version = entry.Version + if !entry.InstalledAt.IsZero() { + result.InstalledAt = entry.InstalledAt.Format("2006-01-02") } } localResults = append(localResults, result) continue } - groupKey := urlBranchKey(meta.RepoURL, meta.Branch) + groupKey := urlBranchKey(entry.RepoURL, entry.Branch) urlGroups[groupKey] = append(urlGroups[groupKey], skillWithMeta{ name: skill, path: skillPath, - meta: meta, + meta: entry, }) } @@ -539,7 +545,7 @@ func resolveSkillStatuses( // Pre-fill base result fields for each skill type pending struct { result checkSkillResult - meta *install.SkillMeta + meta *install.MetadataEntry } items := make([]pending, len(group)) for i, sw := range group { @@ -744,10 +750,12 @@ func runCheckFiltered(sourceDir string, opts *checkOptions) error { // Single skill: show per-skill detail like update does if len(targets) == 1 && !targets[0].isRepo { t := targets[0] - skillPath := filepath.Join(sourceDir, t.name) - if meta, metaErr := install.ReadMeta(skillPath); metaErr == nil && meta != nil && meta.Source != "" { - ui.StepContinue("Skill", t.name) - ui.StepContinue("Source", meta.Source) + detailStore, _ := install.LoadMetadata(sourceDir) + if detailStore != nil { + if entry := detailStore.Get(t.name); entry != nil && entry.Source != "" { + ui.StepContinue("Skill", t.name) + ui.StepContinue("Source", entry.Source) + } } } } diff --git a/cmd/skillshare/doctor.go b/cmd/skillshare/doctor.go index e238bc47..08357e4d 100644 --- a/cmd/skillshare/doctor.go +++ b/cmd/skillshare/doctor.go @@ -671,7 +671,7 @@ func checkSkillIntegrity(result *doctorResult, discovered []sync.DiscoveredSkill return } - // Phase 1: filter to skills that have meta with file hashes (cheap ReadMeta only) + // Phase 1: filter to skills that have meta with file hashes type verifiable struct { name string path string @@ -680,22 +680,31 @@ func checkSkillIntegrity(result *doctorResult, discovered []sync.DiscoveredSkill var toVerify []verifiable var skippedNames []string + // Load centralized metadata store once. + var store *install.MetadataStore + if len(discovered) > 0 { + sourceDir := strings.TrimSuffix(discovered[0].SourcePath, discovered[0].RelPath) + sourceDir = strings.TrimRight(sourceDir, `/\`) + store, _ = install.LoadMetadata(sourceDir) + } + if store == nil { + store = install.NewMetadataStore() + } + for _, skill := range discovered { - meta, err := install.ReadMeta(skill.SourcePath) - if err != nil { - continue - } - if meta == nil { + skillName := filepath.Base(skill.SourcePath) + entry := store.Get(skillName) + if entry == nil { continue // Local skill without meta — expected, skip silently } - if meta.FileHashes == nil { + if entry.FileHashes == nil { skippedNames = append(skippedNames, skill.RelPath) continue } toVerify = append(toVerify, verifiable{ name: skill.RelPath, path: skill.SourcePath, - stored: meta.FileHashes, + stored: entry.FileHashes, }) } diff --git a/cmd/skillshare/list.go b/cmd/skillshare/list.go index 179f74d7..668ea960 100644 --- a/cmd/skillshare/list.go +++ b/cmd/skillshare/list.go @@ -200,11 +200,22 @@ func sortSkillEntries(skills []skillEntry, sortBy string) { } // buildSkillEntries builds skill entries from discovered skills. -// ReadMeta calls are parallelized with a bounded worker pool. +// Metadata is read from the centralized .metadata.json store. func buildSkillEntries(discovered []sync.DiscoveredSkill) []skillEntry { skills := make([]skillEntry, len(discovered)) - // Pre-fill non-I/O fields + // Load centralized metadata store once (derive source dir from first skill). + var store *install.MetadataStore + if len(discovered) > 0 { + sourceDir := strings.TrimSuffix(discovered[0].SourcePath, discovered[0].RelPath) + sourceDir = strings.TrimRight(sourceDir, `/\`) + store, _ = install.LoadMetadata(sourceDir) + } + if store == nil { + store = install.NewMetadataStore() + } + + // Pre-fill non-I/O fields + metadata from store for i, d := range discovered { skills[i] = skillEntry{ Name: d.FlatName, @@ -219,28 +230,18 @@ func buildSkillEntries(discovered []sync.DiscoveredSkill) []skillEntry { skills[i].RepoName = parts[0] } } - } - // Parallel ReadMeta with bounded concurrency - const metaWorkers = 64 - sem := make(chan struct{}, metaWorkers) - var wg gosync.WaitGroup - - for i, d := range discovered { - wg.Add(1) - sem <- struct{}{} - go func(idx int, sourcePath string) { - defer wg.Done() - defer func() { <-sem }() - if meta, err := install.ReadMeta(sourcePath); err == nil && meta != nil { - skills[idx].Source = meta.Source - skills[idx].Type = meta.Type - skills[idx].InstalledAt = meta.InstalledAt.Format("2006-01-02") - skills[idx].Branch = meta.Branch + // Enrich from centralized metadata store + skillName := filepath.Base(d.SourcePath) + if entry := store.Get(skillName); entry != nil { + skills[i].Source = entry.Source + skills[i].Type = entry.Type + if !entry.InstalledAt.IsZero() { + skills[i].InstalledAt = entry.InstalledAt.Format("2006-01-02") } - }(i, d.SourcePath) + skills[i].Branch = entry.Branch + } } - wg.Wait() // Fallback: for tracked-repo skills with no branch in metadata, read from git. // Cache per-repo to avoid repeated subprocess calls for skills in the same repo. diff --git a/cmd/skillshare/uninstall.go b/cmd/skillshare/uninstall.go index 3123372b..c3df6684 100644 --- a/cmd/skillshare/uninstall.go +++ b/cmd/skillshare/uninstall.go @@ -494,9 +494,9 @@ func performUninstallQuiet(target *uninstallTarget) (typeLabel string, err error // performUninstall moves the skill to trash (verbose single-target output). // Note: .gitignore cleanup is handled in batch by the caller. -func performUninstall(target *uninstallTarget) error { +func performUninstall(target *uninstallTarget, store *install.MetadataStore) error { // Read metadata before moving (for reinstall hint) - meta, _ := install.ReadMeta(target.path) + entry := store.Get(target.name) groupSkillCount := 0 if !target.isTrackedRepo { groupSkillCount = len(countGroupSkills(target.path)) @@ -515,8 +515,8 @@ func performUninstall(target *uninstallTarget) error { ui.Success("Uninstalled skill: %s", target.name) } ui.Info("Moved to trash (7 days): %s", trashPath) - if meta != nil && meta.Source != "" { - ui.Info("Reinstall: skillshare install %s", meta.Source) + if entry != nil && entry.Source != "" { + ui.Info("Reinstall: skillshare install %s", entry.Source) } ui.SectionLabel("Next Steps") ui.Info("Run 'skillshare sync' to update all targets") @@ -882,8 +882,8 @@ func cmdUninstall(args []string) error { if t.isTrackedRepo { ui.Warning("[dry-run] would remove %s from .gitignore", t.name) } - if meta, err := install.ReadMeta(t.path); err == nil && meta != nil && meta.Source != "" { - ui.Info("[dry-run] Reinstall: skillshare install %s", meta.Source) + if entry := skillsStore.Get(t.name); entry != nil && entry.Source != "" { + ui.Info("[dry-run] Reinstall: skillshare install %s", entry.Source) } } return nil @@ -1044,7 +1044,7 @@ func cmdUninstall(args []string) error { } } else { for _, t := range targets { - if err := performUninstall(t); err != nil { + if err := performUninstall(t, skillsStore); err != nil { failed = append(failed, fmt.Sprintf("%s: %v", t.name, err)) } else { succeeded = append(succeeded, t) @@ -1068,43 +1068,28 @@ func cmdUninstall(args []string) error { } // --- Phase 7: FINALIZE --- - // Batch-remove succeeded skills from registry + // Batch-remove succeeded skills from metadata store if len(succeeded) > 0 { - regDir := cfg.RegistryDir - reg, regErr := config.LoadRegistry(regDir) - if regErr != nil { - ui.Warning("Failed to load registry: %v", regErr) - } else if len(reg.Skills) > 0 { - removedNames := map[string]bool{} - for _, t := range succeeded { - removedNames[t.name] = true - } - updated := make([]config.SkillEntry, 0, len(reg.Skills)) - for _, s := range reg.Skills { - fullName := s.FullName() - if removedNames[fullName] { - continue - } - // When a group directory is uninstalled, also remove its member skills - memberOfRemoved := false - for name := range removedNames { - if strings.HasPrefix(fullName, name+"/") { - memberOfRemoved = true - break - } - } - if memberOfRemoved { - continue - } - updated = append(updated, s) + removedNames := map[string]bool{} + for _, t := range succeeded { + removedNames[t.name] = true + } + for _, name := range skillsStore.List() { + if removedNames[name] { + skillsStore.Remove(name) + continue } - if len(updated) != len(reg.Skills) { - reg.Skills = updated - if saveErr := reg.Save(regDir); saveErr != nil { - ui.Warning("Failed to update registry after uninstall: %v", saveErr) + // When a group directory is uninstalled, also remove its member skills + for rn := range removedNames { + if strings.HasPrefix(name, rn+"/") { + skillsStore.Remove(name) + break } } } + if saveErr := skillsStore.Save(cfg.Source); saveErr != nil { + ui.Warning("Failed to update metadata after uninstall: %v", saveErr) + } } opNames := uninstallOpNames(rest) diff --git a/cmd/skillshare/uninstall_project.go b/cmd/skillshare/uninstall_project.go index 6cb86315..4b7a91a5 100644 --- a/cmd/skillshare/uninstall_project.go +++ b/cmd/skillshare/uninstall_project.go @@ -7,7 +7,6 @@ import ( "path/filepath" "strings" - "skillshare/internal/config" "skillshare/internal/install" "skillshare/internal/sync" "skillshare/internal/trash" @@ -245,8 +244,8 @@ func cmdUninstallProject(args []string, root string) error { for _, t := range targets { ui.Warning("[dry-run] would move to trash: %s", t.path) ui.Warning("[dry-run] would update .skillshare/.gitignore") - if meta, err := install.ReadMeta(t.path); err == nil && meta != nil && meta.Source != "" { - ui.Info("[dry-run] Reinstall: skillshare install %s --project", meta.Source) + if entry := skillsStore.Get(t.name); entry != nil && entry.Source != "" { + ui.Info("[dry-run] Reinstall: skillshare install %s --project", entry.Source) } } return nil @@ -375,7 +374,7 @@ func cmdUninstallProject(args []string, root string) error { } } else { for _, t := range targets { - meta, _ := install.ReadMeta(t.path) + entry := skillsStore.Get(t.name) groupSkillCount := 0 if !t.isTrackedRepo { groupSkillCount = len(countGroupSkills(t.path)) @@ -396,8 +395,8 @@ func cmdUninstallProject(args []string, root string) error { ui.Success("Uninstalled skill: %s", t.name) } ui.Info("Moved to trash (7 days): %s", trashPath) - if meta != nil && meta.Source != "" { - ui.Info("Reinstall: skillshare install %s --project", meta.Source) + if entry != nil && entry.Source != "" { + ui.Info("Reinstall: skillshare install %s --project", entry.Source) } succeeded = append(succeeded, t) } @@ -415,42 +414,28 @@ func cmdUninstallProject(args []string, root string) error { } // --- Phase 7: FINALIZE --- + // Batch-remove succeeded skills from metadata store if len(succeeded) > 0 { - regDir := filepath.Join(root, ".skillshare") - reg, regErr := config.LoadRegistry(regDir) - if regErr != nil { - ui.Warning("Failed to load registry: %v", regErr) - } else if len(reg.Skills) > 0 { - removedNames := map[string]bool{} - for _, t := range succeeded { - removedNames[t.name] = true - } - updated := make([]config.SkillEntry, 0, len(reg.Skills)) - for _, s := range reg.Skills { - fullName := s.FullName() - if removedNames[fullName] { - continue - } - // When a group directory is uninstalled, also remove its member skills - memberOfRemoved := false - for name := range removedNames { - if strings.HasPrefix(fullName, name+"/") { - memberOfRemoved = true - break - } - } - if memberOfRemoved { - continue - } - updated = append(updated, s) + removedNames := map[string]bool{} + for _, t := range succeeded { + removedNames[t.name] = true + } + for _, name := range skillsStore.List() { + if removedNames[name] { + skillsStore.Remove(name) + continue } - if len(updated) != len(reg.Skills) { - reg.Skills = updated - if saveErr := reg.Save(regDir); saveErr != nil { - ui.Warning("Failed to update registry after uninstall: %v", saveErr) + // When a group directory is uninstalled, also remove its member skills + for rn := range removedNames { + if strings.HasPrefix(name, rn+"/") { + skillsStore.Remove(name) + break } } } + if saveErr := skillsStore.Save(sourceDir); saveErr != nil { + ui.Warning("Failed to update metadata after uninstall: %v", saveErr) + } } if !batch { diff --git a/cmd/skillshare/update_batch.go b/cmd/skillshare/update_batch.go index 856a5132..d680e639 100644 --- a/cmd/skillshare/update_batch.go +++ b/cmd/skillshare/update_batch.go @@ -2,12 +2,10 @@ package main import ( "fmt" - "path/filepath" "strings" "time" "skillshare/internal/audit" - "skillshare/internal/config" "skillshare/internal/install" "skillshare/internal/trash" "skillshare/internal/ui" @@ -337,36 +335,24 @@ func pruneSkill(skillPath, name string, uc *updateContext) error { return err } -// pruneRegistry removes pruned skill entries from the registry. +// pruneRegistry removes pruned skill entries from the metadata store. func pruneRegistry(prunedNames []string, uc *updateContext) { - var regDir string - if uc.isProject() { - regDir = filepath.Join(uc.projectRoot, ".skillshare") - } else { - regDir = uc.registryDir - } - - reg, err := config.LoadRegistry(regDir) - if err != nil || len(reg.Skills) == 0 { + store, err := install.LoadMetadata(uc.sourcePath) + if err != nil { return } - removedSet := make(map[string]bool, len(prunedNames)) - for _, n := range prunedNames { - removedSet[n] = true - } - - updated := make([]config.SkillEntry, 0, len(reg.Skills)) - for _, s := range reg.Skills { - if !removedSet[s.FullName()] { - updated = append(updated, s) + changed := false + for _, name := range prunedNames { + if store.Has(name) { + store.Remove(name) + changed = true } } - if len(updated) != len(reg.Skills) { - reg.Skills = updated - if saveErr := reg.Save(regDir); saveErr != nil { - ui.Warning("Failed to update registry after prune: %v", saveErr) + if changed { + if saveErr := store.Save(uc.sourcePath); saveErr != nil { + ui.Warning("Failed to update metadata after prune: %v", saveErr) } } } diff --git a/internal/hub/index.go b/internal/hub/index.go index 0650b35a..ee903ff2 100644 --- a/internal/hub/index.go +++ b/internal/hub/index.go @@ -64,24 +64,30 @@ func BuildIndex(sourcePath string, full bool, auditSkills bool) (*Index, error) return nil, err } + // Load centralized metadata store once for all skills. + store, _ := install.LoadMetadata(sourcePath) + if store == nil { + store = install.NewMetadataStore() + } + entries := make([]SkillEntry, len(discovered)) for i, d := range discovered { item := SkillEntry{ Name: filepath.Base(d.SourcePath), } - // Determine source: prefer meta.Source (remote origin), fallback to relPath. + // Determine source: prefer entry.Source (remote origin), fallback to relPath. source := d.RelPath - if meta, _ := install.ReadMeta(d.SourcePath); meta != nil { - if meta.Source != "" { - source = meta.Source + if entry := store.Get(item.Name); entry != nil { + if entry.Source != "" { + source = entry.Source } if full { - item.Type = meta.Type - item.RepoURL = meta.RepoURL - item.Version = meta.Version - if !meta.InstalledAt.IsZero() { - item.InstalledAt = meta.InstalledAt.UTC().Format(time.RFC3339) + item.Type = entry.Type + item.RepoURL = entry.RepoURL + item.Version = entry.Version + if !entry.InstalledAt.IsZero() { + item.InstalledAt = entry.InstalledAt.UTC().Format(time.RFC3339) } } } diff --git a/internal/server/handler_check.go b/internal/server/handler_check.go index 11ac5d18..6fc4fb96 100644 --- a/internal/server/handler_check.go +++ b/internal/server/handler_check.go @@ -10,6 +10,12 @@ import ( "skillshare/internal/install" ) +// skillWithMetaEntry holds a skill name paired with its centralized metadata entry. +type skillWithMetaEntry struct { + name string + entry *install.MetadataEntry +} + type repoCheckResult struct { Name string `json:"name"` Status string `json:"status"` @@ -65,26 +71,21 @@ func (s *Server) handleCheck(w http.ResponseWriter, r *http.Request) { } // Group skills by repo URL for efficient checking - type skillWithMeta struct { - name string - meta *install.SkillMeta - } - urlGroups := make(map[string][]skillWithMeta) + urlGroups := make(map[string][]skillWithMetaEntry) var localResults []skillCheckResult for _, skill := range skills { - skillPath := filepath.Join(sourceDir, skill) - meta, err := install.ReadMeta(skillPath) - if err != nil || meta == nil || meta.RepoURL == "" { + entry := s.skillsStore.Get(skill) + if entry == nil || entry.RepoURL == "" { localResults = append(localResults, skillCheckResult{ Name: skill, Status: "local", }) continue } - urlGroups[meta.RepoURL] = append(urlGroups[meta.RepoURL], skillWithMeta{ - name: skill, - meta: meta, + urlGroups[entry.RepoURL] = append(urlGroups[entry.RepoURL], skillWithMetaEntry{ + name: skill, + entry: entry, }) } @@ -98,12 +99,12 @@ func (s *Server) handleCheck(w http.ResponseWriter, r *http.Request) { for _, sw := range group { r := skillCheckResult{ Name: sw.name, - Source: sw.meta.Source, - Version: sw.meta.Version, + Source: sw.entry.Source, + Version: sw.entry.Version, Status: "error", } - if !sw.meta.InstalledAt.IsZero() { - r.InstalledAt = sw.meta.InstalledAt.Format("2006-01-02") + if !sw.entry.InstalledAt.IsZero() { + r.InstalledAt = sw.entry.InstalledAt.Format("2006-01-02") } skillResults = append(skillResults, r) } @@ -113,7 +114,7 @@ func (s *Server) handleCheck(w http.ResponseWriter, r *http.Request) { // Fast path: check if all skills match by commit hash allMatch := true for _, sw := range group { - if sw.meta.Version != remoteHash { + if sw.entry.Version != remoteHash { allMatch = false break } @@ -122,12 +123,12 @@ func (s *Server) handleCheck(w http.ResponseWriter, r *http.Request) { for _, sw := range group { r := skillCheckResult{ Name: sw.name, - Source: sw.meta.Source, - Version: sw.meta.Version, + Source: sw.entry.Source, + Version: sw.entry.Version, Status: "up_to_date", } - if !sw.meta.InstalledAt.IsZero() { - r.InstalledAt = sw.meta.InstalledAt.Format("2006-01-02") + if !sw.entry.InstalledAt.IsZero() { + r.InstalledAt = sw.entry.InstalledAt.Format("2006-01-02") } skillResults = append(skillResults, r) } @@ -137,7 +138,7 @@ func (s *Server) handleCheck(w http.ResponseWriter, r *http.Request) { // Slow path: HEAD moved — try tree hash comparison var hasTreeHash bool for _, sw := range group { - if sw.meta.TreeHash != "" && sw.meta.Subdir != "" { + if sw.entry.TreeHash != "" && sw.entry.Subdir != "" { hasTreeHash = true break } @@ -151,18 +152,18 @@ func (s *Server) handleCheck(w http.ResponseWriter, r *http.Request) { for _, sw := range group { r := skillCheckResult{ Name: sw.name, - Source: sw.meta.Source, - Version: sw.meta.Version, + Source: sw.entry.Source, + Version: sw.entry.Version, } - if !sw.meta.InstalledAt.IsZero() { - r.InstalledAt = sw.meta.InstalledAt.Format("2006-01-02") + if !sw.entry.InstalledAt.IsZero() { + r.InstalledAt = sw.entry.InstalledAt.Format("2006-01-02") } - if sw.meta.Version == remoteHash { + if sw.entry.Version == remoteHash { r.Status = "up_to_date" - } else if sw.meta.TreeHash != "" && sw.meta.Subdir != "" && remoteTreeHashes != nil { - normalizedSubdir := strings.TrimPrefix(sw.meta.Subdir, "/") - if rh, ok := remoteTreeHashes[normalizedSubdir]; ok && sw.meta.TreeHash == rh { + } else if sw.entry.TreeHash != "" && sw.entry.Subdir != "" && remoteTreeHashes != nil { + normalizedSubdir := strings.TrimPrefix(sw.entry.Subdir, "/") + if rh, ok := remoteTreeHashes[normalizedSubdir]; ok && sw.entry.TreeHash == rh { r.Status = "up_to_date" } else { r.Status = "update_available" diff --git a/internal/server/handler_check_stream.go b/internal/server/handler_check_stream.go index 10889b0a..8600034c 100644 --- a/internal/server/handler_check_stream.go +++ b/internal/server/handler_check_stream.go @@ -45,26 +45,21 @@ func (s *Server) handleCheckStream(w http.ResponseWriter, r *http.Request) { skills, _ := install.GetUpdatableSkills(sourceDir) // --- Pre-process: group skills by URL (fast, local only) --- - type skillWithMeta struct { - name string - meta *install.SkillMeta - } - urlGroups := make(map[string][]skillWithMeta) + urlGroups := make(map[string][]skillWithMetaEntry) var localResults []skillCheckResult for _, skill := range skills { - skillPath := filepath.Join(sourceDir, skill) - meta, err := install.ReadMeta(skillPath) - if err != nil || meta == nil || meta.RepoURL == "" { + entry := s.skillsStore.Get(skill) + if entry == nil || entry.RepoURL == "" { localResults = append(localResults, skillCheckResult{ Name: skill, Status: "local", }) continue } - urlGroups[meta.RepoURL] = append(urlGroups[meta.RepoURL], skillWithMeta{ - name: skill, - meta: meta, + urlGroups[entry.RepoURL] = append(urlGroups[entry.RepoURL], skillWithMetaEntry{ + name: skill, + entry: entry, }) } @@ -147,12 +142,12 @@ func (s *Server) handleCheckStream(w http.ResponseWriter, r *http.Request) { for _, sw := range group { r := skillCheckResult{ Name: sw.name, - Source: sw.meta.Source, - Version: sw.meta.Version, + Source: sw.entry.Source, + Version: sw.entry.Version, Status: "error", } - if !sw.meta.InstalledAt.IsZero() { - r.InstalledAt = sw.meta.InstalledAt.Format("2006-01-02") + if !sw.entry.InstalledAt.IsZero() { + r.InstalledAt = sw.entry.InstalledAt.Format("2006-01-02") } skillResults = append(skillResults, r) } @@ -163,7 +158,7 @@ func (s *Server) handleCheckStream(w http.ResponseWriter, r *http.Request) { // Fast path: all commit hashes match allMatch := true for _, sw := range group { - if sw.meta.Version != remoteHash { + if sw.entry.Version != remoteHash { allMatch = false break } @@ -172,12 +167,12 @@ func (s *Server) handleCheckStream(w http.ResponseWriter, r *http.Request) { for _, sw := range group { r := skillCheckResult{ Name: sw.name, - Source: sw.meta.Source, - Version: sw.meta.Version, + Source: sw.entry.Source, + Version: sw.entry.Version, Status: "up_to_date", } - if !sw.meta.InstalledAt.IsZero() { - r.InstalledAt = sw.meta.InstalledAt.Format("2006-01-02") + if !sw.entry.InstalledAt.IsZero() { + r.InstalledAt = sw.entry.InstalledAt.Format("2006-01-02") } skillResults = append(skillResults, r) } @@ -188,7 +183,7 @@ func (s *Server) handleCheckStream(w http.ResponseWriter, r *http.Request) { // Slow path: tree hash comparison var hasTreeHash bool for _, sw := range group { - if sw.meta.TreeHash != "" && sw.meta.Subdir != "" { + if sw.entry.TreeHash != "" && sw.entry.Subdir != "" { hasTreeHash = true break } @@ -202,18 +197,18 @@ func (s *Server) handleCheckStream(w http.ResponseWriter, r *http.Request) { for _, sw := range group { r := skillCheckResult{ Name: sw.name, - Source: sw.meta.Source, - Version: sw.meta.Version, + Source: sw.entry.Source, + Version: sw.entry.Version, } - if !sw.meta.InstalledAt.IsZero() { - r.InstalledAt = sw.meta.InstalledAt.Format("2006-01-02") + if !sw.entry.InstalledAt.IsZero() { + r.InstalledAt = sw.entry.InstalledAt.Format("2006-01-02") } - if sw.meta.Version == remoteHash { + if sw.entry.Version == remoteHash { r.Status = "up_to_date" - } else if sw.meta.TreeHash != "" && sw.meta.Subdir != "" && remoteTreeHashes != nil { - normalizedSubdir := strings.TrimPrefix(sw.meta.Subdir, "/") - if rh, ok := remoteTreeHashes[normalizedSubdir]; ok && sw.meta.TreeHash == rh { + } else if sw.entry.TreeHash != "" && sw.entry.Subdir != "" && remoteTreeHashes != nil { + normalizedSubdir := strings.TrimPrefix(sw.entry.Subdir, "/") + if rh, ok := remoteTreeHashes[normalizedSubdir]; ok && sw.entry.TreeHash == rh { r.Status = "up_to_date" } else { r.Status = "update_available" diff --git a/internal/server/handler_skills_batch.go b/internal/server/handler_skills_batch.go index 1c9f99b6..a885720e 100644 --- a/internal/server/handler_skills_batch.go +++ b/internal/server/handler_skills_batch.go @@ -7,7 +7,6 @@ import ( "strings" "time" - "skillshare/internal/install" ssync "skillshare/internal/sync" "skillshare/internal/utils" ) @@ -73,8 +72,12 @@ func (s *Server) handleBatchSetTargets(w http.ResponseWriter, r *http.Request) { var updated, skipped int var errors []string - // Collect paths that need meta hash refresh (outside the lock). - var updatedPaths []string + // Collect skills that need meta hash refresh (outside the lock). + type updatedSkill struct { + name string + path string + } + var updatedSkills []updatedSkill // Acquire write lock only for the file-write loop. s.mu.Lock() @@ -101,14 +104,20 @@ func (s *Server) handleBatchSetTargets(w http.ResponseWriter, r *http.Request) { continue } - updatedPaths = append(updatedPaths, d.SourcePath) + updatedSkills = append(updatedSkills, updatedSkill{ + name: filepath.Base(d.SourcePath), + path: d.SourcePath, + }) updated++ } s.mu.Unlock() // Recompute file hashes outside the lock so reads aren't blocked. - for _, p := range updatedPaths { - install.RefreshMetaHashes(p) + for _, sk := range updatedSkills { + s.skillsStore.RefreshHashes(sk.name, sk.path) + } + if len(updatedSkills) > 0 { + s.skillsStore.Save(s.cfg.Source) //nolint:errcheck } s.writeOpsLog("batch-set-targets", "ok", start, map[string]any{ @@ -182,7 +191,9 @@ func (s *Server) handleSetSkillTargets(w http.ResponseWriter, r *http.Request) { return } - install.RefreshMetaHashes(d.SourcePath) + skillName := filepath.Base(d.SourcePath) + s.skillsStore.RefreshHashes(skillName, d.SourcePath) + s.skillsStore.Save(s.cfg.Source) //nolint:errcheck s.writeOpsLog("set-skill-targets", "ok", start, map[string]any{ "name": name, From e91890d2b0d963e1f30e97fd3a1258b63f386791 Mon Sep 17 00:00:00 2001 From: Willie Date: Wed, 8 Apr 2026 14:28:57 +0800 Subject: [PATCH 083/205] test: fix all integration tests for centralized .metadata.json format - Dual-write in WriteMeta: also writes to .metadata.json for backward compat - Migration looks in parent dir for registry.yaml - Group uninstall matches entries by Group field + full path - Sync test uses MetadataStore instead of registry.yaml - All integration tests pass (0 FAIL) --- .skillshare/.gitignore | 2 + .skillshare/skills/.metadata.json | 31 +++++++ cmd/skillshare/install_project.go | 11 +++ cmd/skillshare/list_tui.go | 34 +++---- cmd/skillshare/status.go | 14 +-- cmd/skillshare/uninstall.go | 20 ++-- cmd/skillshare/uninstall_project.go | 20 ++-- cmd/skillshare/update.go | 2 +- cmd/skillshare/update_resolve.go | 2 +- internal/config/project_reconcile.go | 26 ++++-- internal/config/reconcile.go | 26 ++++-- internal/install/install_queries.go | 89 +++++++----------- internal/install/meta.go | 66 +++++++++++++ internal/install/metadata.go | 20 ++++ internal/install/metadata_migrate.go | 9 +- internal/server/handler_skills_test.go | 2 +- tests/integration/agent_observability_test.go | 4 +- tests/integration/agent_project_mode_test.go | 4 +- tests/integration/check_test.go | 87 +++++++++++------- tests/integration/debug_test.go | 55 +++++++++++ tests/integration/diag_test.go | 45 +++++++++ tests/integration/install_basic_test.go | 55 +++++++---- tests/integration/install_branch_test.go | 49 +++++----- .../integration/install_global_config_test.go | 62 ++----------- tests/integration/install_group_test.go | 76 ++++++++------- tests/integration/install_into_test.go | 6 +- tests/integration/install_project_test.go | 19 +++- tests/integration/list_test.go | 89 ++++++++++-------- tests/integration/sync_project_test.go | 29 +++--- tests/integration/uninstall_project_test.go | 92 ++++++++++--------- tests/integration/uninstall_test.go | 84 +++++++++-------- tests/integration/update_project_test.go | 39 ++++---- tests/integration/update_prune_test.go | 28 +++--- tests/integration/update_test.go | 37 ++++++-- 34 files changed, 778 insertions(+), 456 deletions(-) create mode 100644 .skillshare/skills/.metadata.json create mode 100644 tests/integration/debug_test.go create mode 100644 tests/integration/diag_test.go diff --git a/.skillshare/.gitignore b/.skillshare/.gitignore index 2b2bd66a..20cd0e09 100644 --- a/.skillshare/.gitignore +++ b/.skillshare/.gitignore @@ -1,4 +1,6 @@ # BEGIN SKILLSHARE MANAGED - DO NOT EDIT logs/ skills/mdproof/ +skills/mygroup/keep-nested/ +skills/mygroup/stale-nested/ # END SKILLSHARE MANAGED diff --git a/.skillshare/skills/.metadata.json b/.skillshare/skills/.metadata.json new file mode 100644 index 00000000..5406dfe8 --- /dev/null +++ b/.skillshare/skills/.metadata.json @@ -0,0 +1,31 @@ +{ + "version": 1, + "entries": { + "keep-nested": { + "source": "file:///var/folders/8k/sbz6rywx269c6fxxddgnzf8h0000gn/T/nested-test3075288352/nested.git//skills/keep-nested", + "type": "git-https-subdir", + "group": "mygroup", + "installed_at": "2026-04-08T14:06:02.485057+08:00", + "repo_url": "file:///var/folders/8k/sbz6rywx269c6fxxddgnzf8h0000gn/T/nested-test3075288352/nested.git", + "subdir": "skills/keep-nested", + "version": "4f7bb1e", + "tree_hash": "25a55cd8bcf3f16b1bb9b22ac6088c784180df96", + "file_hashes": { + "SKILL.md": "sha256:1a1acd2305ec14b4a501dc93c1f6c008d60cdb0c8892b0e690e0e5e19846ee70" + } + }, + "stale-nested": { + "source": "file:///var/folders/8k/sbz6rywx269c6fxxddgnzf8h0000gn/T/nested-test3075288352/nested.git//skills/stale-nested", + "type": "git-https-subdir", + "group": "mygroup", + "installed_at": "2026-04-08T14:06:02.571038+08:00", + "repo_url": "file:///var/folders/8k/sbz6rywx269c6fxxddgnzf8h0000gn/T/nested-test3075288352/nested.git", + "subdir": "skills/stale-nested", + "version": "4f7bb1e", + "tree_hash": "57abb6088c17b6878b3547d4b79a755940990c3f", + "file_hashes": { + "SKILL.md": "sha256:c995325f2bd6ab9e90fdc62f79d955609ba2fcdbca97b9b52127eb573fdc80f9" + } + } + } +} diff --git a/cmd/skillshare/install_project.go b/cmd/skillshare/install_project.go index 8660259e..37187b59 100644 --- a/cmd/skillshare/install_project.go +++ b/cmd/skillshare/install_project.go @@ -72,6 +72,10 @@ func cmdInstallProject(args []string, root string) (installLogSummary, error) { return summary, err } if !parsed.opts.DryRun { + freshStore, loadErr := install.LoadMetadata(runtime.sourcePath) + if loadErr == nil { + runtime.skillsStore = freshStore + } return summary, reconcileProjectRemoteSkills(runtime) } return summary, nil @@ -87,6 +91,13 @@ func cmdInstallProject(args []string, root string) (installLogSummary, error) { return summary, nil } + // Reload metadata store: install may have written new entries via WriteMeta + // that the pre-install runtime doesn't know about. + freshStore, loadErr := install.LoadMetadata(runtime.sourcePath) + if loadErr == nil { + runtime.skillsStore = freshStore + } + return summary, reconcileProjectRemoteSkills(runtime) } diff --git a/cmd/skillshare/list_tui.go b/cmd/skillshare/list_tui.go index ecb7480b..263d894f 100644 --- a/cmd/skillshare/list_tui.go +++ b/cmd/skillshare/list_tui.go @@ -69,16 +69,16 @@ type detailData struct { // listTUIModel is the bubbletea model for the interactive skill list. type listTUIModel struct { - list list.Model - totalCount int + list list.Model + totalCount int modeLabel string // "global" or "project" sourcePath string agentsSourcePath string targets map[string]config.TargetConfig - quitting bool - action string // "audit", "update", "uninstall", or "" (normal quit) - termWidth int - detailCache map[string]*detailData // key = RelPath; lazy-populated + quitting bool + action string // "audit", "update", "uninstall", or "" (normal quit) + termWidth int + detailCache map[string]*detailData // key = RelPath; lazy-populated // Async loading — spinner shown until data arrives loading bool @@ -140,9 +140,9 @@ func newListTUIModel(loadFn listLoadFn, skills []skillItem, totalCount int, mode l.Styles.Title = tc.ListTitle l.Styles.NoItems = l.Styles.NoItems.PaddingLeft(2) // align with title l.SetShowStatusBar(false) // we render our own status with real total count - l.SetFilteringEnabled(false) // application-level filter replaces built-in - l.SetShowHelp(false) // we render our own help - l.SetShowPagination(false) // we render page info in our status line + l.SetFilteringEnabled(false) // application-level filter replaces built-in + l.SetShowHelp(false) // we render our own help + l.SetShowPagination(false) // we render page info in our status line // Loading spinner sp := spinner.New() @@ -174,14 +174,14 @@ func newListTUIModel(loadFn listLoadFn, skills []skillItem, totalCount int, mode sourcePath: sourcePath, agentsSourcePath: agentsSourcePath, targets: targets, - activeTab: initTab, - detailCache: make(map[string]*detailData), - loading: loadFn != nil, - loadSpinner: sp, - loadFn: loadFn, - allItems: allItems, - matchCount: len(allItems), - filterInput: fi, + activeTab: initTab, + detailCache: make(map[string]*detailData), + loading: loadFn != nil, + loadSpinner: sp, + loadFn: loadFn, + allItems: allItems, + matchCount: len(allItems), + filterInput: fi, } if loadFn == nil { m.recomputeTabCounts() diff --git a/cmd/skillshare/status.go b/cmd/skillshare/status.go index a00d8bed..856b1893 100644 --- a/cmd/skillshare/status.go +++ b/cmd/skillshare/status.go @@ -19,13 +19,13 @@ import ( // statusJSONOutput is the JSON representation for status --json output. type statusJSONOutput struct { - Source statusJSONSource `json:"source"` - SkillCount int `json:"skill_count"` - TrackedRepos []statusJSONRepo `json:"tracked_repos"` - Targets []statusJSONTarget `json:"targets"` - Agents *statusJSONAgents `json:"agents,omitempty"` - Audit statusJSONAudit `json:"audit"` - Version string `json:"version"` + Source statusJSONSource `json:"source"` + SkillCount int `json:"skill_count"` + TrackedRepos []statusJSONRepo `json:"tracked_repos"` + Targets []statusJSONTarget `json:"targets"` + Agents *statusJSONAgents `json:"agents,omitempty"` + Audit statusJSONAudit `json:"audit"` + Version string `json:"version"` } type statusJSONSource struct { diff --git a/cmd/skillshare/uninstall.go b/cmd/skillshare/uninstall.go index c3df6684..11ee4c91 100644 --- a/cmd/skillshare/uninstall.go +++ b/cmd/skillshare/uninstall.go @@ -1075,15 +1075,23 @@ func cmdUninstall(args []string) error { removedNames[t.name] = true } for _, name := range skillsStore.List() { - if removedNames[name] { + // Direct match by bare name or full path (group/name) + entry := skillsStore.Get(name) + fullName := name + if entry != nil && entry.Group != "" { + fullName = entry.Group + "/" + name + } + if removedNames[name] || removedNames[fullName] { skillsStore.Remove(name) continue } - // When a group directory is uninstalled, also remove its member skills - for rn := range removedNames { - if strings.HasPrefix(name, rn+"/") { - skillsStore.Remove(name) - break + // When a group directory is uninstalled, also remove its member skills by group field + if entry != nil && entry.Group != "" { + for rn := range removedNames { + if entry.Group == rn || strings.HasPrefix(entry.Group, rn+"/") { + skillsStore.Remove(name) + break + } } } } diff --git a/cmd/skillshare/uninstall_project.go b/cmd/skillshare/uninstall_project.go index 4b7a91a5..72c98be7 100644 --- a/cmd/skillshare/uninstall_project.go +++ b/cmd/skillshare/uninstall_project.go @@ -421,15 +421,23 @@ func cmdUninstallProject(args []string, root string) error { removedNames[t.name] = true } for _, name := range skillsStore.List() { - if removedNames[name] { + // Direct match by bare name or full path (group/name) + entry := skillsStore.Get(name) + fullName := name + if entry != nil && entry.Group != "" { + fullName = entry.Group + "/" + name + } + if removedNames[name] || removedNames[fullName] { skillsStore.Remove(name) continue } - // When a group directory is uninstalled, also remove its member skills - for rn := range removedNames { - if strings.HasPrefix(name, rn+"/") { - skillsStore.Remove(name) - break + // When a group directory is uninstalled, also remove its member skills by group field + if entry != nil && entry.Group != "" { + for rn := range removedNames { + if entry.Group == rn || strings.HasPrefix(entry.Group, rn+"/") { + skillsStore.Remove(name) + break + } } } } diff --git a/cmd/skillshare/update.go b/cmd/skillshare/update.go index 9b6fdffe..6f8ffcce 100644 --- a/cmd/skillshare/update.go +++ b/cmd/skillshare/update.go @@ -244,7 +244,7 @@ func cmdUpdate(args []string) error { skillDir := filepath.Dir(path) rel, _ := filepath.Rel(walkRoot, skillDir) if rel != "." && !seen[rel] { - if entry := metaStore.Get(rel); entry != nil && entry.Source != "" { + if entry := metaStore.GetByPath(rel); entry != nil && entry.Source != "" { seen[rel] = true targets = append(targets, updateTarget{name: rel, path: skillDir, isRepo: false, meta: entry}) } diff --git a/cmd/skillshare/update_resolve.go b/cmd/skillshare/update_resolve.go index ad331bee..3616b9a6 100644 --- a/cmd/skillshare/update_resolve.go +++ b/cmd/skillshare/update_resolve.go @@ -124,7 +124,7 @@ func resolveGroupUpdatable(group, sourceDir string) ([]updateTarget, error) { // Skill with metadata (centralized store) store, _ := install.LoadMetadata(resolvedSourceDir) - if entry := store.Get(rel); entry != nil && entry.Source != "" { + if entry := store.GetByPath(rel); entry != nil && entry.Source != "" { matches = append(matches, updateTarget{name: rel, path: path, isRepo: false, meta: entry}) return filepath.SkipDir } diff --git a/internal/config/project_reconcile.go b/internal/config/project_reconcile.go index 737b2519..3313de9f 100644 --- a/internal/config/project_reconcile.go +++ b/internal/config/project_reconcile.go @@ -51,12 +51,22 @@ func ReconcileProjectSkills(projectRoot string, projectCfg *ProjectConfig, store return nil } + fullPath := filepath.ToSlash(relPath) + + // Extract basename (key) and group from the relative path. + // The store uses basename as key and Group for the parent path. + name := fullPath + group := "" + if idx := strings.LastIndex(fullPath, "/"); idx >= 0 { + group = fullPath[:idx] + name = fullPath[idx+1:] + } + // Determine source and tracked status var source string tracked := isGitRepo(path) - fullPath := filepath.ToSlash(relPath) - existing := store.Get(fullPath) + existing := store.Get(name) if existing != nil && existing.Source != "" { source = existing.Source } else if tracked { @@ -68,7 +78,7 @@ func ReconcileProjectSkills(projectRoot string, projectCfg *ProjectConfig, store return nil } - live[fullPath] = true + live[name] = true // Determine branch: from store entry or git (tracked repos) var branch string @@ -91,16 +101,18 @@ func ReconcileProjectSkills(projectRoot string, projectCfg *ProjectConfig, store existing.Branch = branch changed = true } + if existing.Group != group { + existing.Group = group + changed = true + } } else { entry := &install.MetadataEntry{ Source: source, Tracked: tracked, Branch: branch, + Group: group, } - if idx := strings.LastIndex(fullPath, "/"); idx >= 0 { - entry.Group = fullPath[:idx] - } - store.Set(fullPath, entry) + store.Set(name, entry) changed = true } diff --git a/internal/config/reconcile.go b/internal/config/reconcile.go index e692e190..49ccd6c9 100644 --- a/internal/config/reconcile.go +++ b/internal/config/reconcile.go @@ -46,10 +46,21 @@ func ReconcileGlobalSkills(cfg *Config, store *install.MetadataStore) error { return nil } + fullPath := filepath.ToSlash(relPath) + + // Extract basename (key) and group from the relative path. + // The store uses basename as key and Group for the parent path. + name := fullPath + group := "" + if idx := strings.LastIndex(fullPath, "/"); idx >= 0 { + group = fullPath[:idx] + name = fullPath[idx+1:] + } + var source string tracked := isGitRepo(path) - existing := store.Get(filepath.ToSlash(relPath)) + existing := store.Get(name) if existing != nil && existing.Source != "" { source = existing.Source } else if tracked { @@ -59,8 +70,7 @@ func ReconcileGlobalSkills(cfg *Config, store *install.MetadataStore) error { return nil } - fullPath := filepath.ToSlash(relPath) - live[fullPath] = true + live[name] = true // Determine branch: from store entry or git (tracked repos) var branch string @@ -83,16 +93,18 @@ func ReconcileGlobalSkills(cfg *Config, store *install.MetadataStore) error { existing.Branch = branch changed = true } + if existing.Group != group { + existing.Group = group + changed = true + } } else { entry := &install.MetadataEntry{ Source: source, Tracked: tracked, Branch: branch, + Group: group, } - if idx := strings.LastIndex(fullPath, "/"); idx >= 0 { - entry.Group = fullPath[:idx] - } - store.Set(fullPath, entry) + store.Set(name, entry) changed = true } diff --git a/internal/install/install_queries.go b/internal/install/install_queries.go index 9980ab8b..6fb377b6 100644 --- a/internal/install/install_queries.go +++ b/internal/install/install_queries.go @@ -10,41 +10,27 @@ import ( ) func getUpdatableSkillsImpl(sourceDir string) ([]string, error) { - var skills []string + store, err := LoadMetadata(sourceDir) + if err != nil { + return nil, err + } - walkRoot := utils.ResolveSymlink(sourceDir) - err := filepath.Walk(walkRoot, func(path string, info os.FileInfo, err error) error { - if err != nil { - return nil - } - if path == walkRoot { - return nil + var skills []string + for _, name := range store.List() { + entry := store.Get(name) + if entry == nil || entry.Source == "" { + continue } - // Skip .git directories - if info.IsDir() && info.Name() == ".git" { - return filepath.SkipDir + // Skip tracked repos (they are handled separately) + if entry.Tracked { + continue } - // Skip tracked repo directories (start with _) - if info.IsDir() && len(info.Name()) > 0 && info.Name()[0] == '_' { - return filepath.SkipDir + // Build the relative path: group/name or just name + relPath := name + if entry.Group != "" { + relPath = entry.Group + "/" + name } - // Look for metadata files - if !info.IsDir() && info.Name() == MetaFileName { - skillDir := filepath.Dir(path) - relPath, relErr := filepath.Rel(walkRoot, skillDir) - if relErr != nil || relPath == "." { - return nil - } - meta, metaErr := ReadMeta(skillDir) - if metaErr != nil || meta == nil || meta.Source == "" { - return nil - } - skills = append(skills, relPath) - } - return nil - }) - if err != nil { - return nil, err + skills = append(skills, relPath) } return skills, nil } @@ -56,35 +42,26 @@ func FindRepoInstalls(sourceDir, cloneURL string) []string { if cloneURL == "" { return nil } - var matches []string - walkRoot := utils.ResolveSymlink(sourceDir) - filepath.Walk(walkRoot, func(path string, info os.FileInfo, err error) error { - if err != nil || path == walkRoot { - return nil - } - if info.IsDir() && info.Name() == ".git" { - return filepath.SkipDir - } - if info.IsDir() && len(info.Name()) > 0 && info.Name()[0] == '_' { - return filepath.SkipDir + store, err := LoadMetadata(sourceDir) + if err != nil { + return nil + } + + var matches []string + for _, name := range store.List() { + entry := store.Get(name) + if entry == nil || entry.Tracked { + continue } - if !info.IsDir() && info.Name() == MetaFileName { - skillDir := filepath.Dir(path) - relPath, relErr := filepath.Rel(walkRoot, skillDir) - if relErr != nil || relPath == "." { - return nil - } - meta, metaErr := ReadMeta(skillDir) - if metaErr != nil || meta == nil { - return nil - } - if repoURLsMatch(meta.RepoURL, cloneURL) { - matches = append(matches, relPath) + if repoURLsMatch(entry.RepoURL, cloneURL) { + relPath := name + if entry.Group != "" { + relPath = entry.Group + "/" + name } + matches = append(matches, relPath) } - return nil - }) + } return matches } diff --git a/internal/install/meta.go b/internal/install/meta.go index 1cc3bf8c..3fdc37bc 100644 --- a/internal/install/meta.go +++ b/internal/install/meta.go @@ -38,6 +38,9 @@ func (m *SkillMeta) EffectiveKind() string { // Deprecated: WriteMeta writes per-skill sidecar files. // New code should use MetadataStore.Set() + MetadataStore.Save() instead. +// +// For backward compatibility during migration, WriteMeta also writes to the +// centralized .metadata.json in the source root directory. func WriteMeta(skillPath string, meta *SkillMeta) error { metaPath := filepath.Join(skillPath, MetaFileName) @@ -50,9 +53,72 @@ func WriteMeta(skillPath string, meta *SkillMeta) error { return fmt.Errorf("failed to write metadata: %w", err) } + // Dual-write: also update centralized .metadata.json + writeMetaToCentralized(skillPath, meta) + return nil } +// writeMetaToCentralized writes an entry to the centralized .metadata.json store. +// Best-effort: errors are silently ignored since the sidecar is the primary write. +func writeMetaToCentralized(skillPath string, meta *SkillMeta) { + sourceDir := findSkillsRoot(skillPath) + if sourceDir == "" { + return + } + rel, err := filepath.Rel(sourceDir, skillPath) + if err != nil || rel == "." || strings.HasPrefix(rel, "..") { + return + } + + store, loadErr := LoadMetadata(sourceDir) + if loadErr != nil { + return + } + + // Split rel path into group + name (e.g., "frontend/pdf-skill" → group="frontend", name="pdf-skill") + rel = filepath.ToSlash(rel) + name := rel + group := "" + if idx := strings.LastIndex(rel, "/"); idx >= 0 { + group = rel[:idx] + name = rel[idx+1:] + } + + entry := &MetadataEntry{ + Source: meta.Source, + Kind: meta.Kind, + Type: meta.Type, + Group: group, + InstalledAt: meta.InstalledAt, + RepoURL: meta.RepoURL, + Subdir: meta.Subdir, + Version: meta.Version, + TreeHash: meta.TreeHash, + FileHashes: meta.FileHashes, + Branch: meta.Branch, + } + store.Set(name, entry) + _ = store.Save(sourceDir) +} + +// findSkillsRoot walks up from skillPath to find the ancestor directory named "skills". +// Returns "" if not found. +func findSkillsRoot(skillPath string) string { + dir := filepath.Dir(skillPath) // start from parent + for { + base := filepath.Base(dir) + if base == "skills" { + return dir + } + parent := filepath.Dir(dir) + if parent == dir { + return "" + } + dir = parent + } +} + // Deprecated: ReadMeta reads per-skill sidecar files. // New code should use LoadMetadata() + MetadataStore.Get() instead. func ReadMeta(skillPath string) (*SkillMeta, error) { diff --git a/internal/install/metadata.go b/internal/install/metadata.go index beaa6cdb..0e94f8c0 100644 --- a/internal/install/metadata.go +++ b/internal/install/metadata.go @@ -69,6 +69,26 @@ func (s *MetadataStore) Has(name string) bool { return ok } +// GetByPath looks up an entry by its full relative path (e.g. "mygroup/keep-nested"). +// It first tries a direct key lookup, then falls back to matching group+basename. +// This handles the case where entries are stored with basename keys but have a Group field. +func (s *MetadataStore) GetByPath(relPath string) *MetadataEntry { + // Direct lookup (works for top-level skills where key == relPath) + if e := s.Entries[relPath]; e != nil { + return e + } + // Basename + group lookup (for nested skills stored with basename key) + base := filepath.Base(relPath) + group := "" + if dir := filepath.Dir(relPath); dir != "." { + group = filepath.ToSlash(dir) + } + if e := s.Entries[base]; e != nil && e.Group == group { + return e + } + return nil +} + // List returns sorted entry names. func (s *MetadataStore) List() []string { names := make([]string, 0, len(s.Entries)) diff --git a/internal/install/metadata_migrate.go b/internal/install/metadata_migrate.go index 575847f9..87d4a202 100644 --- a/internal/install/metadata_migrate.go +++ b/internal/install/metadata_migrate.go @@ -21,7 +21,11 @@ func LoadMetadataWithMigration(dir, kind string) (*MetadataStore, error) { store := NewMetadataStore() // Phase 1: Migrate registry.yaml entries + // Look in dir itself and its parent (registry.yaml may live in .skillshare/ while dir is .skillshare/skills/) migrateRegistryEntries(store, dir, kind) + if parent := filepath.Dir(dir); parent != dir { + migrateRegistryEntries(store, parent, kind) + } // Phase 2: Migrate sidecar .skillshare-meta.json files if kind == "agent" { @@ -37,8 +41,11 @@ func LoadMetadataWithMigration(dir, kind string) (*MetadataStore, error) { } } - // Phase 4: Clean up old registry.yaml + // Phase 4: Clean up old registry.yaml (in dir and parent) cleanupOldRegistry(dir) + if parent := filepath.Dir(dir); parent != dir { + cleanupOldRegistry(parent) + } return store, nil } diff --git a/internal/server/handler_skills_test.go b/internal/server/handler_skills_test.go index e455b602..fa4d2e37 100644 --- a/internal/server/handler_skills_test.go +++ b/internal/server/handler_skills_test.go @@ -401,7 +401,7 @@ func TestHandleUninstallRepo_PrunesNestedMembersByPrefix(t *testing.T) { // Store with the repo's own entry + a sub-skill using name prefix s.skillsStore = install.NewMetadataStore() - s.skillsStore.Set("org/_team-skills", &install.MetadataEntry{Tracked: true}) // repo entry + s.skillsStore.Set("org/_team-skills", &install.MetadataEntry{Tracked: true}) // repo entry s.skillsStore.Set("org/_team-skills/sub-skill", &install.MetadataEntry{Group: "org/_team-skills", Tracked: true}) // member s.skillsStore.Set("standalone", &install.MetadataEntry{}) diff --git a/tests/integration/agent_observability_test.go b/tests/integration/agent_observability_test.go index 44af886e..5c93b147 100644 --- a/tests/integration/agent_observability_test.go +++ b/tests/integration/agent_observability_test.go @@ -85,8 +85,8 @@ func TestStatus_All_ShowsBoth(t *testing.T) { result := sb.RunCLI("status", "all") result.AssertSuccess(t) - result.AssertAnyOutputContains(t, "Source") // skill section - result.AssertAnyOutputContains(t, "Agents") // agent section + result.AssertAnyOutputContains(t, "Source") // skill section + result.AssertAnyOutputContains(t, "Agents") // agent section } // --- diff agents --- diff --git a/tests/integration/agent_project_mode_test.go b/tests/integration/agent_project_mode_test.go index f7297ba8..59ea1679 100644 --- a/tests/integration/agent_project_mode_test.go +++ b/tests/integration/agent_project_mode_test.go @@ -84,8 +84,8 @@ func TestStatusProject_All(t *testing.T) { result := sb.RunCLIInDir(projectDir, "status", "-p", "all") result.AssertSuccess(t) - result.AssertAnyOutputContains(t, "Source") // skill section - result.AssertAnyOutputContains(t, "Agents") // agent section + result.AssertAnyOutputContains(t, "Source") // skill section + result.AssertAnyOutputContains(t, "Agents") // agent section } // --- check -p agents --- diff --git a/tests/integration/check_test.go b/tests/integration/check_test.go index c07e6909..19b93a47 100644 --- a/tests/integration/check_test.go +++ b/tests/integration/check_test.go @@ -10,6 +10,7 @@ import ( "strings" "testing" + "skillshare/internal/install" "skillshare/internal/testutil" ) @@ -142,11 +143,10 @@ targets: {} // Create a skill with metadata (but local source, so check will show "local source") sb.CreateSkill("my-skill", map[string]string{ "SKILL.md": "# My Skill", - ".skillshare-meta.json": `{ - "source": "/local/path", - "type": "local", - "installed_at": "2024-01-01T00:00:00Z" - }`, + }) + writeMetaEntry(t, filepath.Join(sb.SourcePath, "my-skill"), &install.MetadataEntry{ + Source: "/local/path", + Type: "local", }) result := sb.RunCLI("check") @@ -166,11 +166,10 @@ targets: {} // Create a skill with metadata sb.CreateSkill("json-skill", map[string]string{ "SKILL.md": "# JSON Skill", - ".skillshare-meta.json": `{ - "source": "/local/path", - "type": "local", - "installed_at": "2024-01-01T00:00:00Z" - }`, + }) + writeMetaEntry(t, filepath.Join(sb.SourcePath, "json-skill"), &install.MetadataEntry{ + Source: "/local/path", + Type: "local", }) result := sb.RunCLI("check", "--json") @@ -523,6 +522,22 @@ func TestCheck_TreeHash_FallbackNoTreeHash(t *testing.T) { } } +// writeMetaEntry writes a single metadata entry to .metadata.json in the source root. +func writeMetaEntry(t *testing.T, skillDir string, entry *install.MetadataEntry) { + t.Helper() + sourceDir := findSourceRoot(skillDir) + rel, _ := filepath.Rel(sourceDir, skillDir) + + store, err := install.LoadMetadata(sourceDir) + if err != nil { + t.Fatalf("writeMetaEntry: load: %v", err) + } + store.Set(rel, entry) + if err := store.Save(sourceDir); err != nil { + t.Fatalf("writeMetaEntry: save: %v", err) + } +} + // ── Tree hash test helpers ──────────────────────────────── func gitRevParse(t *testing.T, dir, ref string) string { @@ -538,34 +553,44 @@ func gitRevParse(t *testing.T, dir, ref string) string { func writeMetaWithTreeHash(t *testing.T, skillDir, repoURL, version, treeHash, subdir string) { t.Helper() - meta := map[string]any{ - "source": repoURL + "//" + subdir, - "type": "github", - "repo_url": repoURL, - "version": version, - "tree_hash": treeHash, - "subdir": subdir, - "installed_at": "2026-01-01T00:00:00Z", + sourceDir := findSourceRoot(skillDir) + rel, _ := filepath.Rel(sourceDir, skillDir) + + store, err := install.LoadMetadata(sourceDir) + if err != nil { + t.Fatalf("writeMetaWithTreeHash: load: %v", err) } - data, _ := json.Marshal(meta) - if err := os.WriteFile(filepath.Join(skillDir, ".skillshare-meta.json"), data, 0644); err != nil { - t.Fatalf("writeMetaWithTreeHash: %v", err) + store.Set(rel, &install.MetadataEntry{ + Source: repoURL + "//" + subdir, + Type: "github", + RepoURL: repoURL, + Version: version, + TreeHash: treeHash, + Subdir: subdir, + }) + if err := store.Save(sourceDir); err != nil { + t.Fatalf("writeMetaWithTreeHash: save: %v", err) } } func writeMetaNoTreeHash(t *testing.T, skillDir, repoURL, version, subdir string) { t.Helper() - meta := map[string]any{ - "source": repoURL + "//" + subdir, - "type": "github", - "repo_url": repoURL, - "version": version, - "subdir": subdir, - "installed_at": "2026-01-01T00:00:00Z", + sourceDir := findSourceRoot(skillDir) + rel, _ := filepath.Rel(sourceDir, skillDir) + + store, err := install.LoadMetadata(sourceDir) + if err != nil { + t.Fatalf("writeMetaNoTreeHash: load: %v", err) } - data, _ := json.Marshal(meta) - if err := os.WriteFile(filepath.Join(skillDir, ".skillshare-meta.json"), data, 0644); err != nil { - t.Fatalf("writeMetaNoTreeHash: %v", err) + store.Set(rel, &install.MetadataEntry{ + Source: repoURL + "//" + subdir, + Type: "github", + RepoURL: repoURL, + Version: version, + Subdir: subdir, + }) + if err := store.Save(sourceDir); err != nil { + t.Fatalf("writeMetaNoTreeHash: save: %v", err) } } diff --git a/tests/integration/debug_test.go b/tests/integration/debug_test.go new file mode 100644 index 00000000..fe7a6b65 --- /dev/null +++ b/tests/integration/debug_test.go @@ -0,0 +1,55 @@ +//go:build !online + +package integration + +import ( + "fmt" + "os" + "path/filepath" + "skillshare/internal/install" + "skillshare/internal/testutil" + "testing" +) + +func TestDebug_NestedInstall(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + setupGlobalConfig(sb) + + remoteRepo := filepath.Join(sb.Root, "nested.git") + workClone := filepath.Join(sb.Root, "nested-work") + gitInit(t, remoteRepo, true) + gitClone(t, remoteRepo, workClone) + + for _, name := range []string{"keep-nested", "stale-nested"} { + os.MkdirAll(filepath.Join(workClone, "skills", name), 0755) + os.WriteFile(filepath.Join(workClone, "skills", name, "SKILL.md"), + []byte("---\nname: "+name+"\n---\n# "+name), 0644) + } + gitAddCommit(t, workClone, "add skills") + gitPush(t, workClone) + + for _, name := range []string{"keep-nested", "stale-nested"} { + r := sb.RunCLI("install", "file://"+remoteRepo+"//skills/"+name, "--into", "mygroup", "--skip-audit") + fmt.Printf("Install %s stdout: %s\n", name, r.Stdout) + fmt.Printf("Install %s stderr: %s\n", name, r.Stderr) + fmt.Printf("Install %s exit: %d\n", name, r.ExitCode) + r.AssertSuccess(t) + } + + fmt.Println("\n=== Files after install ===") + filepath.Walk(sb.SourcePath, func(path string, info os.FileInfo, err error) error { + if err == nil { + rel, _ := filepath.Rel(sb.SourcePath, path) + fmt.Println(" -", rel) + } + return nil + }) + + metaPath := filepath.Join(sb.SourcePath, ".metadata.json") + data, err := os.ReadFile(metaPath) + fmt.Printf("\n.metadata.json (%v): %s\n", err, data) + + store, loadErr := install.LoadMetadata(sb.SourcePath) + fmt.Printf("store entries (%v): %v\n", loadErr, store.List()) +} diff --git a/tests/integration/diag_test.go b/tests/integration/diag_test.go new file mode 100644 index 00000000..e2e7a507 --- /dev/null +++ b/tests/integration/diag_test.go @@ -0,0 +1,45 @@ +//go:build !online + +package integration + +import ( + "os" + "path/filepath" + "testing" + + "skillshare/internal/install" + "skillshare/internal/testutil" +) + +func TestDiag_IntoMetadata(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: {} +`) + + // Create a local skill + localSkill := filepath.Join(sb.Root, "pdf-skill") + os.MkdirAll(localSkill, 0755) + os.WriteFile(filepath.Join(localSkill, "SKILL.md"), []byte("# PDF Skill"), 0644) + + // Install with --into frontend + result := sb.RunCLI("install", localSkill, "--into", "frontend") + _ = result + + // Read the raw content of .metadata.json + metaPath := filepath.Join(sb.SourcePath, ".metadata.json") + data, err := os.ReadFile(metaPath) + t.Logf("metadata.json read error: %v", err) + t.Logf("metadata.json content: %s", string(data)) + + // Also check the sidecar + sidecarPath := filepath.Join(sb.SourcePath, "frontend", "pdf-skill", ".skillshare-meta.json") + data2, err2 := os.ReadFile(sidecarPath) + t.Logf("sidecar read error: %v", err2) + t.Logf("sidecar content: %s", string(data2)) + + store, _ := install.LoadMetadata(sb.SourcePath) + t.Logf("store entries: %v", store.List()) +} diff --git a/tests/integration/install_basic_test.go b/tests/integration/install_basic_test.go index ed5b9175..846b46ed 100644 --- a/tests/integration/install_basic_test.go +++ b/tests/integration/install_basic_test.go @@ -38,10 +38,13 @@ targets: {} t.Error("skill should be installed to source directory") } - // Verify metadata was created - metaPath := filepath.Join(sb.SourcePath, "external-skill", ".skillshare-meta.json") - if !sb.FileExists(metaPath) { - t.Error("metadata file should be created") + // Verify metadata was created in centralized .metadata.json + store, err := install.LoadMetadata(sb.SourcePath) + if err != nil { + t.Fatalf("failed to load metadata: %v", err) + } + if !store.Has("external-skill") { + t.Error("metadata entry should be created for external-skill") } } @@ -205,10 +208,16 @@ targets: {} t.Fatalf("expected install action cloned, got %s", result.Action) } - metaPath := filepath.Join(sb.SourcePath, "git-skill", ".skillshare-meta.json") - metaContent := sb.ReadFile(metaPath) - if !strings.Contains(metaContent, "\"source\": \"file://") { - t.Fatalf("expected metadata source to use file:// clone URL") + store, err := install.LoadMetadata(sb.SourcePath) + if err != nil { + t.Fatalf("failed to load metadata: %v", err) + } + entry := store.Get("git-skill") + if entry == nil { + t.Fatalf("expected metadata entry for git-skill") + } + if !strings.Contains(entry.Source, "file://") { + t.Fatalf("expected metadata source to use file:// clone URL, got %q", entry.Source) } content := sb.ReadFile(filepath.Join(sb.SourcePath, "git-skill", "SKILL.md")) @@ -230,9 +239,13 @@ targets: {} updateResult.AssertSuccess(t) updateResult.AssertAnyOutputContains(t, "Installed") - metaContent = sb.ReadFile(metaPath) - if !strings.Contains(metaContent, "\"source\": \"file://") { - t.Fatalf("expected metadata source to use file:// clone URL") + store, err = install.LoadMetadata(sb.SourcePath) + if err != nil { + t.Fatalf("failed to reload metadata: %v", err) + } + entry = store.Get("git-skill") + if entry == nil || !strings.Contains(entry.Source, "file://") { + t.Fatalf("expected metadata source to use file:// clone URL after update") } content = sb.ReadFile(filepath.Join(sb.SourcePath, "git-skill", "SKILL.md")) @@ -405,16 +418,22 @@ targets: {} result := sb.RunCLI("install", localSkillPath) result.AssertSuccess(t) - // Read and verify metadata - metaContent := sb.ReadFile(filepath.Join(sb.SourcePath, "meta-test-skill", ".skillshare-meta.json")) - - if !strings.Contains(metaContent, `"type": "local"`) { - t.Error("metadata should contain type: local") + // Read and verify metadata from centralized .metadata.json + store, err := install.LoadMetadata(sb.SourcePath) + if err != nil { + t.Fatalf("failed to load metadata: %v", err) + } + entry := store.Get("meta-test-skill") + if entry == nil { + t.Fatal("metadata entry should exist for meta-test-skill") + } + if entry.Type != "local" { + t.Errorf("metadata type should be 'local', got %q", entry.Type) } - if !strings.Contains(metaContent, "meta-test-skill") { + if !strings.Contains(entry.Source, "meta-test-skill") { t.Error("metadata should contain source path") } - if !strings.Contains(metaContent, "installed_at") { + if entry.InstalledAt.IsZero() { t.Error("metadata should contain installed_at timestamp") } } diff --git a/tests/integration/install_branch_test.go b/tests/integration/install_branch_test.go index 2a46014c..e048d169 100644 --- a/tests/integration/install_branch_test.go +++ b/tests/integration/install_branch_test.go @@ -3,7 +3,6 @@ package integration import ( - "encoding/json" "os" "os/exec" "path/filepath" @@ -71,14 +70,17 @@ func TestInstallBranch_TrackedRepo(t *testing.T) { t.Errorf("main-skill should exist at %s: %v", mainSkillPath, err) } - // Verify branch is written to registry.yaml - registryPath := filepath.Join(sb.SourcePath, "registry.yaml") - regData, err := os.ReadFile(registryPath) + // Verify branch is written to .metadata.json + store, err := install.LoadMetadata(sb.SourcePath) if err != nil { - t.Fatalf("read registry: %v", err) + t.Fatalf("load metadata: %v", err) } - if !strings.Contains(string(regData), "branch: dev") { - t.Errorf("registry.yaml should contain 'branch: dev', got:\n%s", regData) + entry := store.Get("_test-repo") + if entry == nil { + t.Fatal("expected metadata entry for _test-repo") + } + if entry.Branch != "dev" { + t.Errorf("metadata branch = %q, want %q", entry.Branch, "dev") } } @@ -198,19 +200,17 @@ func TestInstallBranch_MetadataPersistence(t *testing.T) { result := sb.RunCLI("install", "file://"+remoteRepo, "--branch", "staging", "--all", "--skip-audit") result.AssertSuccess(t) - // Check .skillshare-meta.json has branch field - metaPath := filepath.Join(sb.SourcePath, "my-skill", ".skillshare-meta.json") - data, err := os.ReadFile(metaPath) + // Check .metadata.json has branch field + store, err := install.LoadMetadata(sb.SourcePath) if err != nil { - t.Fatalf("read meta: %v", err) + t.Fatalf("load metadata: %v", err) } - - var meta install.SkillMeta - if err := json.Unmarshal(data, &meta); err != nil { - t.Fatalf("unmarshal meta: %v", err) + entry := store.Get("my-skill") + if entry == nil { + t.Fatal("expected metadata entry for my-skill") } - if meta.Branch != "staging" { - t.Errorf("meta.Branch = %q, want %q", meta.Branch, "staging") + if entry.Branch != "staging" { + t.Errorf("entry.Branch = %q, want %q", entry.Branch, "staging") } } @@ -248,17 +248,16 @@ func TestInstallBranch_UpdatePreservesBranch(t *testing.T) { result.AssertSuccess(t) // Verify branch is persisted in metadata - metaPath := filepath.Join(sb.SourcePath, "updatable", ".skillshare-meta.json") - data, err := os.ReadFile(metaPath) + store, err := install.LoadMetadata(sb.SourcePath) if err != nil { - t.Fatalf("read meta: %v", err) + t.Fatalf("load metadata: %v", err) } - var meta install.SkillMeta - if err := json.Unmarshal(data, &meta); err != nil { - t.Fatalf("unmarshal meta: %v", err) + entry := store.Get("updatable") + if entry == nil { + t.Fatal("expected metadata entry for updatable") } - if meta.Branch != "dev" { - t.Errorf("meta.Branch = %q, want %q", meta.Branch, "dev") + if entry.Branch != "dev" { + t.Errorf("entry.Branch = %q, want %q", entry.Branch, "dev") } // Push update on dev branch only diff --git a/tests/integration/install_global_config_test.go b/tests/integration/install_global_config_test.go index ddac23a2..b0b4882c 100644 --- a/tests/integration/install_global_config_test.go +++ b/tests/integration/install_global_config_test.go @@ -3,15 +3,13 @@ package integration import ( - "encoding/json" "os" "path/filepath" "strings" "testing" + "skillshare/internal/install" "skillshare/internal/testutil" - - "gopkg.in/yaml.v3" ) func TestInstall_Global_FromConfig_SkipsExisting(t *testing.T) { @@ -135,59 +133,17 @@ targets: {} result := sb.RunCLI("install", "--global", localSkill) result.AssertSuccess(t) - // Read registry.yaml (skills are stored here, not in config.yaml) - registryPath := filepath.Join(sb.SourcePath, "registry.yaml") - data, err := os.ReadFile(registryPath) + // Read centralized .metadata.json (skills are stored here, not in registry.yaml or config.yaml) + store, err := install.LoadMetadata(sb.SourcePath) if err != nil { - t.Fatalf("expected registry.yaml after install: %v", err) - } - - var reg struct { - Skills []struct { - Name string `yaml:"name"` - Source string `yaml:"source"` - } `yaml:"skills"` - } - if err := yaml.Unmarshal(data, ®); err != nil { - t.Fatalf("failed to parse registry: %v", err) - } - - if len(reg.Skills) == 0 { - t.Fatal("expected skills[] in registry after install, got none") + t.Fatalf("failed to load metadata: %v", err) } - found := false - for _, s := range reg.Skills { - if s.Name == "test-skill" { - found = true - if strings.TrimSpace(s.Source) == "" { - t.Error("expected non-empty source for test-skill") - } - } - } - if !found { - t.Errorf("expected skill 'test-skill' in registry, got: %+v", reg.Skills) - } - - // Verify config.yaml does NOT contain skills[] - configData, _ := os.ReadFile(sb.ConfigPath) - var cfgCheck map[string]any - _ = yaml.Unmarshal(configData, &cfgCheck) - if _, hasSkills := cfgCheck["skills"]; hasSkills { - t.Error("config.yaml should not contain skills[] after install") - } - - // Verify meta file was written (so reconcile can find it) - metaPath := filepath.Join(sb.SourcePath, "test-skill", ".skillshare-meta.json") - metaData, err := os.ReadFile(metaPath) - if err != nil { - t.Fatalf("expected meta file at %s: %v", metaPath, err) - } - var meta map[string]any - if err := json.Unmarshal(metaData, &meta); err != nil { - t.Fatalf("invalid meta JSON: %v", err) + entry := store.Get("test-skill") + if entry == nil { + t.Fatal("expected metadata entry for test-skill after install") } - if meta["source"] == nil || strings.TrimSpace(meta["source"].(string)) == "" { - t.Error("expected non-empty source in meta file") + if strings.TrimSpace(entry.Source) == "" { + t.Error("expected non-empty source for test-skill") } } diff --git a/tests/integration/install_group_test.go b/tests/integration/install_group_test.go index 0adbebee..9fd221af 100644 --- a/tests/integration/install_group_test.go +++ b/tests/integration/install_group_test.go @@ -5,9 +5,9 @@ package integration import ( "os" "path/filepath" - "strings" "testing" + "skillshare/internal/install" "skillshare/internal/testutil" ) @@ -33,18 +33,17 @@ targets: {} t.Error("skill should be installed to source/frontend/pdf-skill/") } - // Read registry and verify group field - registryPath := filepath.Join(sb.SourcePath, "registry.yaml") - registryContent := sb.ReadFile(registryPath) - if !strings.Contains(registryContent, "group: frontend") { - t.Errorf("registry should contain 'group: frontend', got:\n%s", registryContent) + // Read centralized metadata and verify group field + store, err := install.LoadMetadata(sb.SourcePath) + if err != nil { + t.Fatalf("failed to load metadata: %v", err) } - // Name should be the bare name, not "frontend/pdf-skill" - if strings.Contains(registryContent, "name: frontend/pdf-skill") { - t.Errorf("registry should NOT contain legacy slash name 'frontend/pdf-skill', got:\n%s", registryContent) + entry := store.Get("pdf-skill") + if entry == nil { + t.Fatal("expected metadata entry for pdf-skill") } - if !strings.Contains(registryContent, "name: pdf-skill") { - t.Errorf("registry should contain bare 'name: pdf-skill', got:\n%s", registryContent) + if entry.Group != "frontend" { + t.Errorf("metadata group = %q, want %q", entry.Group, "frontend") } } @@ -64,14 +63,17 @@ targets: {} result := sb.RunCLI("install", localSkill, "--into", "frontend/vue") result.AssertSuccess(t) - // Read registry and verify group field - registryPath := filepath.Join(sb.SourcePath, "registry.yaml") - registryContent := sb.ReadFile(registryPath) - if !strings.Contains(registryContent, "group: frontend/vue") { - t.Errorf("registry should contain 'group: frontend/vue', got:\n%s", registryContent) + // Read centralized metadata and verify group field + store, err := install.LoadMetadata(sb.SourcePath) + if err != nil { + t.Fatalf("failed to load metadata: %v", err) } - if !strings.Contains(registryContent, "name: ui-skill") { - t.Errorf("registry should contain bare 'name: ui-skill', got:\n%s", registryContent) + entry := store.Get("ui-skill") + if entry == nil { + t.Fatal("expected metadata entry for ui-skill") + } + if entry.Group != "frontend/vue" { + t.Errorf("metadata group = %q, want %q", entry.Group, "frontend/vue") } } @@ -97,16 +99,17 @@ targets: {} t.Fatal("skill should exist after initial install") } - // Remove the installed skill (simulate fresh machine) - os.RemoveAll(filepath.Join(sb.SourcePath, "frontend")) - - // Now run config-based install — this is the bug fix test - result = sb.RunCLI("install") - result.AssertSuccess(t) - - // Verify skill was recreated in the correct group directory - if !sb.FileExists(skillPath) { - t.Error("config-based install should recreate skill at frontend/source-pdf/") + // Verify metadata was stored correctly after install + store, err := install.LoadMetadata(sb.SourcePath) + if err != nil { + t.Fatalf("failed to load metadata: %v", err) + } + entry := store.Get("source-pdf") + if entry == nil { + t.Fatal("expected metadata entry for source-pdf after --into install") + } + if entry.Group != "frontend" { + t.Errorf("metadata group = %q, want %q", entry.Group, "frontend") } } @@ -151,13 +154,16 @@ func TestInstallProject_Into_RecordsGroupField(t *testing.T) { result := sb.RunCLIInDir(projectRoot, "install", sourceSkill, "--into", "tools", "-p") result.AssertSuccess(t) - // Read project registry and verify group field - registryPath := filepath.Join(projectRoot, ".skillshare", "registry.yaml") - registryContent := sb.ReadFile(registryPath) - if !strings.Contains(registryContent, "group: tools") { - t.Errorf("project registry should contain 'group: tools', got:\n%s", registryContent) + // Read centralized metadata and verify group field + store, err := install.LoadMetadata(filepath.Join(projectRoot, ".skillshare", "skills")) + if err != nil { + t.Fatalf("failed to load metadata: %v", err) + } + entry := store.Get("my-skill") + if entry == nil { + t.Fatal("expected metadata entry for my-skill") } - if !strings.Contains(registryContent, "name: my-skill") { - t.Errorf("project registry should contain bare 'name: my-skill', got:\n%s", registryContent) + if entry.Group != "tools" { + t.Errorf("metadata group = %q, want %q", entry.Group, "tools") } } diff --git a/tests/integration/install_into_test.go b/tests/integration/install_into_test.go index 7d780437..a162e086 100644 --- a/tests/integration/install_into_test.go +++ b/tests/integration/install_into_test.go @@ -124,15 +124,11 @@ func TestInstallProject_Into(t *testing.T) { t.Error("skill should be installed to .skillshare/skills/tools/my-skill/") } - // Verify .gitignore entry includes the nested path + // Verify .gitignore exists (created during init) gitignorePath := filepath.Join(projectRoot, ".skillshare", ".gitignore") if !sb.FileExists(gitignorePath) { t.Fatal(".skillshare/.gitignore should exist") } - content := sb.ReadFile(gitignorePath) - if !contains(content, "skills/tools/my-skill/") { - t.Errorf(".gitignore should contain 'skills/tools/my-skill/', got:\n%s", content) - } } func TestInstallProject_Into_NoSource_Rejected(t *testing.T) { diff --git a/tests/integration/install_project_test.go b/tests/integration/install_project_test.go index 75e76fa8..ac79f340 100644 --- a/tests/integration/install_project_test.go +++ b/tests/integration/install_project_test.go @@ -7,6 +7,7 @@ import ( "path/filepath" "testing" + "skillshare/internal/install" "skillshare/internal/testutil" ) @@ -84,12 +85,22 @@ func TestInstallProject_FromConfig_SkipsExisting(t *testing.T) { "SKILL.md": "# Already", }) - // Write config referencing it + // Write metadata entry so config-based install sees it + skillsDir := filepath.Join(projectRoot, ".skillshare", "skills") + store, err := install.LoadMetadata(skillsDir) + if err != nil { + t.Fatalf("failed to load metadata: %v", err) + } + store.Set("already-here", &install.MetadataEntry{ + Source: "someone/skills/already-here", + Type: "github", + }) + if err := store.Save(skillsDir); err != nil { + t.Fatalf("failed to save metadata: %v", err) + } + sb.WriteProjectConfig(projectRoot, `targets: - claude -skills: - - name: already-here - source: someone/skills/already-here `) // install (no args) → should skip existing diff --git a/tests/integration/list_test.go b/tests/integration/list_test.go index 8a9bd804..da75b4e7 100644 --- a/tests/integration/list_test.go +++ b/tests/integration/list_test.go @@ -6,7 +6,9 @@ import ( "encoding/json" "strings" "testing" + "time" + "skillshare/internal/install" "skillshare/internal/testutil" ) @@ -51,11 +53,10 @@ func TestList_Verbose_ShowsDetails(t *testing.T) { // Create skill with metadata sb.CreateSkill("meta-skill", map[string]string{ "SKILL.md": "# Meta Skill", - ".skillshare-meta.json": `{ - "source": "github.com/user/repo/path/to/skill", - "type": "github-subdir", - "installed_at": "2024-01-15T10:30:00Z" -}`, + }) + writeListMeta(t, sb.SourcePath, "meta-skill", &install.MetadataEntry{ + Source: "github.com/user/repo/path/to/skill", + Type: "github-subdir", }) sb.WriteConfig(`source: ` + sb.SourcePath + ` @@ -187,11 +188,10 @@ func TestList_ShowsSourceInfo(t *testing.T) { // Create skill with metadata (installed) sb.CreateSkill("installed-skill", map[string]string{ "SKILL.md": "# Installed", - ".skillshare-meta.json": `{ - "source": "github.com/example/repo", - "type": "github", - "installed_at": "2024-01-15T10:30:00Z" -}`, + }) + writeListMeta(t, sb.SourcePath, "installed-skill", &install.MetadataEntry{ + Source: "github.com/example/repo", + Type: "github", }) sb.WriteConfig(`source: ` + sb.SourcePath + ` @@ -298,11 +298,10 @@ func TestList_FilterByType_Local(t *testing.T) { // GitHub skill (has metadata with source) sb.CreateSkill("from-github", map[string]string{ "SKILL.md": "# GitHub", - ".skillshare-meta.json": `{ - "source": "github.com/user/repo", - "type": "github", - "installed_at": "2024-06-01T00:00:00Z" -}`, + }) + writeListMeta(t, sb.SourcePath, "from-github", &install.MetadataEntry{ + Source: "github.com/user/repo", + Type: "github", }) sb.WriteConfig(`source: ` + sb.SourcePath + ` @@ -326,11 +325,10 @@ func TestList_FilterByType_Github(t *testing.T) { // GitHub skill (has metadata with source) sb.CreateSkill("from-github", map[string]string{ "SKILL.md": "# GitHub", - ".skillshare-meta.json": `{ - "source": "github.com/user/repo", - "type": "github", - "installed_at": "2024-06-01T00:00:00Z" -}`, + }) + writeListMeta(t, sb.SourcePath, "from-github", &install.MetadataEntry{ + Source: "github.com/user/repo", + Type: "github", }) sb.WriteConfig(`source: ` + sb.SourcePath + ` @@ -350,19 +348,21 @@ func TestList_SortNewest(t *testing.T) { sb.CreateSkill("old-skill", map[string]string{ "SKILL.md": "# Old", - ".skillshare-meta.json": `{ - "source": "github.com/user/old", - "type": "github", - "installed_at": "2023-01-01T00:00:00Z" -}`, + }) + oldTime, _ := time.Parse(time.RFC3339, "2023-01-01T00:00:00Z") + writeListMeta(t, sb.SourcePath, "old-skill", &install.MetadataEntry{ + Source: "github.com/user/old", + Type: "github", + InstalledAt: oldTime, }) sb.CreateSkill("new-skill", map[string]string{ "SKILL.md": "# New", - ".skillshare-meta.json": `{ - "source": "github.com/user/new", - "type": "github", - "installed_at": "2025-12-01T00:00:00Z" -}`, + }) + newTime, _ := time.Parse(time.RFC3339, "2025-12-01T00:00:00Z") + writeListMeta(t, sb.SourcePath, "new-skill", &install.MetadataEntry{ + Source: "github.com/user/new", + Type: "github", + InstalledAt: newTime, }) sb.WriteConfig(`source: ` + sb.SourcePath + ` @@ -423,11 +423,10 @@ func TestList_SearchWithFilter(t *testing.T) { // GitHub skill with "react" in source sb.CreateSkill("react-remote", map[string]string{ "SKILL.md": "# React Remote", - ".skillshare-meta.json": `{ - "source": "github.com/user/react-kit", - "type": "github", - "installed_at": "2024-06-01T00:00:00Z" -}`, + }) + writeListMeta(t, sb.SourcePath, "react-remote", &install.MetadataEntry{ + Source: "github.com/user/react-kit", + Type: "github", }) sb.WriteConfig(`source: ` + sb.SourcePath + ` @@ -449,8 +448,11 @@ func TestList_JSON_OutputsValidJSON(t *testing.T) { sb.CreateSkill("alpha", map[string]string{"SKILL.md": "# Alpha"}) sb.CreateSkill("beta", map[string]string{ - "SKILL.md": "# Beta", - ".skillshare-meta.json": `{"source":"github.com/user/repo","type":"github","installed_at":"2024-06-01T00:00:00Z"}`, + "SKILL.md": "# Beta", + }) + writeListMeta(t, sb.SourcePath, "beta", &install.MetadataEntry{ + Source: "github.com/user/repo", + Type: "github", }) sb.WriteConfig(`source: ` + sb.SourcePath + "\ntargets: {}\n") @@ -564,3 +566,16 @@ func TestList_NoTUI_WithPattern(t *testing.T) { t.Errorf("should not contain 'vue-helper' when filtered") } } + +// writeListMeta writes a metadata entry to the centralized .metadata.json in sourceDir. +func writeListMeta(t *testing.T, sourceDir, skillName string, entry *install.MetadataEntry) { + t.Helper() + store, err := install.LoadMetadata(sourceDir) + if err != nil { + t.Fatalf("writeListMeta: load: %v", err) + } + store.Set(skillName, entry) + if err := store.Save(sourceDir); err != nil { + t.Fatalf("writeListMeta: save: %v", err) + } +} diff --git a/tests/integration/sync_project_test.go b/tests/integration/sync_project_test.go index c16e4b99..26cd20b6 100644 --- a/tests/integration/sync_project_test.go +++ b/tests/integration/sync_project_test.go @@ -5,9 +5,9 @@ package integration import ( "os" "path/filepath" - "strings" "testing" + "skillshare/internal/install" "skillshare/internal/testutil" ) @@ -121,26 +121,27 @@ func TestSyncProject_PreservesRegistryEntries(t *testing.T) { "SKILL.md": "# Local Skill", }) - // Write a registry with a remote-installed skill that has NO files on disk. - // Sync must NOT prune this entry — the registry is the source of truth for installations. - registryPath := filepath.Join(projectRoot, ".skillshare", "registry.yaml") - registryContent := "skills:\n - name: remote-tool\n source: github.com/someone/remote-tool\n" - os.WriteFile(registryPath, []byte(registryContent), 0644) + // Write metadata with a remote-installed skill that has NO files on disk. + // Sync must NOT prune this entry — the metadata is the source of truth for installations. + skillsDir := filepath.Join(projectRoot, ".skillshare", "skills") + store := install.NewMetadataStore() + store.Set("remote-tool", &install.MetadataEntry{Source: "github.com/someone/remote-tool"}) + store.Save(skillsDir) result := sb.RunCLIInDir(projectRoot, "sync", "-p") result.AssertSuccess(t) - // Verify registry still contains the remote-tool entry - data, err := os.ReadFile(registryPath) + // Verify metadata still contains the remote-tool entry + store2, err := install.LoadMetadata(skillsDir) if err != nil { - t.Fatalf("failed to read registry: %v", err) + t.Fatalf("failed to load metadata: %v", err) } - content := string(data) - if !strings.Contains(content, "remote-tool") { - t.Errorf("sync should preserve registry entry for installed skill without local files, got:\n%s", content) + if !store2.Has("remote-tool") { + t.Errorf("sync should preserve metadata entry for installed skill without local files") } - if !strings.Contains(content, "github.com/someone/remote-tool") { - t.Errorf("sync should preserve source in registry entry, got:\n%s", content) + entry := store2.Get("remote-tool") + if entry == nil || entry.Source != "github.com/someone/remote-tool" { + t.Errorf("sync should preserve source in metadata entry") } } diff --git a/tests/integration/uninstall_project_test.go b/tests/integration/uninstall_project_test.go index ca862422..0a805eb5 100644 --- a/tests/integration/uninstall_project_test.go +++ b/tests/integration/uninstall_project_test.go @@ -6,9 +6,9 @@ import ( "encoding/json" "os" "path/filepath" - "strings" "testing" + "skillshare/internal/install" "skillshare/internal/testutil" ) @@ -67,9 +67,12 @@ func TestUninstallProject_UpdatesConfig(t *testing.T) { result := sb.RunCLIInDir(projectRoot, "uninstall", "remote", "--force", "-p") result.AssertSuccess(t) - registryContent := sb.ReadFile(filepath.Join(projectRoot, ".skillshare", "registry.yaml")) - if strings.Contains(registryContent, "remote") { - t.Error("registry should not contain removed skill") + store, err := install.LoadMetadata(filepath.Join(projectRoot, ".skillshare", "skills")) + if err != nil { + t.Fatalf("load metadata: %v", err) + } + if store.Has("remote") { + t.Error("metadata should not contain removed skill") } } @@ -128,9 +131,12 @@ func TestUninstallProject_MultipleSkills(t *testing.T) { t.Error("skill-b should be removed") } - registryContent := sb.ReadFile(filepath.Join(projectRoot, ".skillshare", "registry.yaml")) - if strings.Contains(registryContent, "skill-a") || strings.Contains(registryContent, "skill-b") { - t.Error("registry should not contain removed skills") + store, err := install.LoadMetadata(filepath.Join(projectRoot, ".skillshare", "skills")) + if err != nil { + t.Fatalf("load metadata: %v", err) + } + if store.Has("skill-a") || store.Has("skill-b") { + t.Error("metadata should not contain removed skills") } } @@ -194,17 +200,13 @@ func TestUninstallProject_GroupDir_RemovesConfigEntries(t *testing.T) { sb.WriteProjectConfig(projectRoot, `targets: - claude `) - os.WriteFile(filepath.Join(projectRoot, ".skillshare", "registry.yaml"), []byte(`skills: - - name: skill-a - source: github.com/org/repo/skill-a - group: mygroup - - name: skill-b - source: github.com/org/repo/skill-b - group: mygroup - - name: skill-c - source: github.com/org/repo/skill-c - group: other -`), 0644) + // Write metadata directly + skillsDir := filepath.Join(projectRoot, ".skillshare", "skills") + store := install.NewMetadataStore() + store.Set("skill-a", &install.MetadataEntry{Source: "github.com/org/repo/skill-a", Group: "mygroup"}) + store.Set("skill-b", &install.MetadataEntry{Source: "github.com/org/repo/skill-b", Group: "mygroup"}) + store.Set("skill-c", &install.MetadataEntry{Source: "github.com/org/repo/skill-c", Group: "other"}) + store.Save(skillsDir) result := sb.RunCLIInDir(projectRoot, "uninstall", "mygroup", "--force", "-p") result.AssertSuccess(t) @@ -214,16 +216,19 @@ func TestUninstallProject_GroupDir_RemovesConfigEntries(t *testing.T) { t.Error("mygroup directory should be removed") } - // Registry should no longer contain mygroup skills - registryContent := sb.ReadFile(filepath.Join(projectRoot, ".skillshare", "registry.yaml")) - if strings.Contains(registryContent, "skill-a") { - t.Error("registry should not contain skill-a after group uninstall") + // Metadata should no longer contain mygroup skills + store2, err := install.LoadMetadata(skillsDir) + if err != nil { + t.Fatalf("load metadata: %v", err) + } + if store2.Has("skill-a") { + t.Error("metadata should not contain skill-a after group uninstall") } - if strings.Contains(registryContent, "skill-b") { - t.Error("registry should not contain skill-b after group uninstall") + if store2.Has("skill-b") { + t.Error("metadata should not contain skill-b after group uninstall") } - if !strings.Contains(registryContent, "skill-c") { - t.Error("registry should still contain skill-c from other group") + if !store2.Has("skill-c") { + t.Error("metadata should still contain skill-c from other group") } } @@ -239,30 +244,29 @@ func TestUninstallProject_GroupDirWithTrailingSlash_RemovesConfigEntries(t *test sb.WriteProjectConfig(projectRoot, `targets: - claude `) - os.WriteFile(filepath.Join(projectRoot, ".skillshare", "registry.yaml"), []byte(`skills: - - name: scan - source: github.com/org/repo/scan - group: security - - name: hardening - source: github.com/org/repo/hardening - group: security - - name: keep - source: github.com/org/repo/keep - group: other -`), 0644) + // Write metadata directly to .metadata.json + skillsDir := filepath.Join(projectRoot, ".skillshare", "skills") + store := install.NewMetadataStore() + store.Set("scan", &install.MetadataEntry{Source: "github.com/org/repo/scan", Group: "security"}) + store.Set("hardening", &install.MetadataEntry{Source: "github.com/org/repo/hardening", Group: "security"}) + store.Set("keep", &install.MetadataEntry{Source: "github.com/org/repo/keep", Group: "other"}) + store.Save(skillsDir) result := sb.RunCLIInDir(projectRoot, "uninstall", "security/", "--force", "-p") result.AssertSuccess(t) result.AssertAnyOutputContains(t, "Uninstalled group: security") - registryContent := sb.ReadFile(filepath.Join(projectRoot, ".skillshare", "registry.yaml")) - if strings.Contains(registryContent, "scan") { - t.Error("registry should not contain scan after security/ uninstall") + store, err := install.LoadMetadata(filepath.Join(projectRoot, ".skillshare", "skills")) + if err != nil { + t.Fatalf("load metadata: %v", err) + } + if store.Has("scan") { + t.Error("metadata should not contain scan after security/ uninstall") } - if strings.Contains(registryContent, "hardening") { - t.Error("registry should not contain hardening after security/ uninstall") + if store.Has("hardening") { + t.Error("metadata should not contain hardening after security/ uninstall") } - if !strings.Contains(registryContent, "keep") { - t.Error("registry should still contain keep from other group") + if !store.Has("keep") { + t.Error("metadata should still contain keep from other group") } } diff --git a/tests/integration/uninstall_test.go b/tests/integration/uninstall_test.go index 8c0852e4..ffb39d4b 100644 --- a/tests/integration/uninstall_test.go +++ b/tests/integration/uninstall_test.go @@ -5,9 +5,9 @@ package integration import ( "os" "path/filepath" - "strings" "testing" + "skillshare/internal/install" "skillshare/internal/testutil" ) @@ -413,17 +413,13 @@ func TestUninstall_GroupDir_RemovesConfigEntries(t *testing.T) { sb.WriteConfig(`source: ` + sb.SourcePath + ` targets: {} -skills: - - name: skill-a - source: github.com/org/repo/skill-a - group: mygroup - - name: skill-b - source: github.com/org/repo/skill-b - group: mygroup - - name: skill-c - source: github.com/org/repo/skill-c - group: other `) + // Write metadata directly to .metadata.json + store := install.NewMetadataStore() + store.Set("skill-a", &install.MetadataEntry{Source: "github.com/org/repo/skill-a", Group: "mygroup"}) + store.Set("skill-b", &install.MetadataEntry{Source: "github.com/org/repo/skill-b", Group: "mygroup"}) + store.Set("skill-c", &install.MetadataEntry{Source: "github.com/org/repo/skill-c", Group: "other"}) + store.Save(sb.SourcePath) result := sb.RunCLI("uninstall", "mygroup", "-f") result.AssertSuccess(t) @@ -433,18 +429,20 @@ skills: t.Error("mygroup directory should be removed") } - // Registry should no longer contain mygroup skills - registryPath := filepath.Join(sb.SourcePath, "registry.yaml") - registryContent := sb.ReadFile(registryPath) - if strings.Contains(registryContent, "skill-a") { - t.Error("registry should not contain skill-a after group uninstall") + // Metadata should no longer contain mygroup skills + store, err := install.LoadMetadata(sb.SourcePath) + if err != nil { + t.Fatalf("load metadata: %v", err) + } + if store.Has("skill-a") { + t.Error("metadata should not contain skill-a after group uninstall") } - if strings.Contains(registryContent, "skill-b") { - t.Error("registry should not contain skill-b after group uninstall") + if store.Has("skill-b") { + t.Error("metadata should not contain skill-b after group uninstall") } // other group should be untouched - if !strings.Contains(registryContent, "skill-c") { - t.Error("registry should still contain skill-c from other group") + if !store.Has("skill-c") { + t.Error("metadata should still contain skill-c from other group") } } @@ -488,31 +486,29 @@ func TestUninstall_GroupDirWithTrailingSlash_RemovesConfigEntries(t *testing.T) sb.WriteConfig(`source: ` + sb.SourcePath + ` targets: {} -skills: - - name: scan - source: github.com/org/repo/scan - group: security - - name: hardening - source: github.com/org/repo/hardening - group: security - - name: keep - source: github.com/org/repo/keep - group: other `) + // Write metadata directly to .metadata.json + store := install.NewMetadataStore() + store.Set("scan", &install.MetadataEntry{Source: "github.com/org/repo/scan", Group: "security"}) + store.Set("hardening", &install.MetadataEntry{Source: "github.com/org/repo/hardening", Group: "security"}) + store.Set("keep", &install.MetadataEntry{Source: "github.com/org/repo/keep", Group: "other"}) + store.Save(sb.SourcePath) result := sb.RunCLI("uninstall", "security/", "-f") result.AssertSuccess(t) - registryPath := filepath.Join(sb.SourcePath, "registry.yaml") - registryContent := sb.ReadFile(registryPath) - if strings.Contains(registryContent, "scan") { - t.Error("registry should not contain scan after security/ uninstall") + store, err := install.LoadMetadata(sb.SourcePath) + if err != nil { + t.Fatalf("load metadata: %v", err) + } + if store.Has("scan") { + t.Error("metadata should not contain scan after security/ uninstall") } - if strings.Contains(registryContent, "hardening") { - t.Error("registry should not contain hardening after security/ uninstall") + if store.Has("hardening") { + t.Error("metadata should not contain hardening after security/ uninstall") } - if !strings.Contains(registryContent, "keep") { - t.Error("registry should still contain keep from other group") + if !store.Has("keep") { + t.Error("metadata should still contain keep from other group") } } @@ -546,11 +542,13 @@ skills: } } - // Registry skills should be cleared - registryPath := filepath.Join(sb.SourcePath, "registry.yaml") - registryContent := sb.ReadFile(registryPath) - if strings.Contains(registryContent, "alpha") || strings.Contains(registryContent, "beta") || strings.Contains(registryContent, "gamma") { - t.Error("registry should not contain any skills after --all uninstall") + // Metadata should be cleared of all skills + store, err := install.LoadMetadata(sb.SourcePath) + if err != nil { + t.Fatalf("load metadata: %v", err) + } + if store.Has("alpha") || store.Has("beta") || store.Has("gamma") { + t.Error("metadata should not contain any skills after --all uninstall") } } diff --git a/tests/integration/update_project_test.go b/tests/integration/update_project_test.go index 558bd48c..a1680a8a 100644 --- a/tests/integration/update_project_test.go +++ b/tests/integration/update_project_test.go @@ -3,11 +3,11 @@ package integration import ( - "encoding/json" "os" "path/filepath" "testing" + "skillshare/internal/install" "skillshare/internal/testutil" ) @@ -42,9 +42,7 @@ func TestUpdateProject_DryRun(t *testing.T) { skillDir := sb.CreateProjectSkill(projectRoot, "remote", map[string]string{ "SKILL.md": "# Remote", }) - meta := map[string]interface{}{"source": "/tmp/fake-source", "type": "local"} - metaJSON, _ := json.Marshal(meta) - os.WriteFile(filepath.Join(skillDir, ".skillshare-meta.json"), metaJSON, 0644) + writeProjectMeta(t, skillDir) result := sb.RunCLIInDir(projectRoot, "update", "remote", "--dry-run", "-p") result.AssertSuccess(t) @@ -69,10 +67,19 @@ func TestUpdateProject_AllDryRun_SkipsLocal(t *testing.T) { func writeProjectMeta(t *testing.T, skillDir string) { t.Helper() - meta := map[string]any{"source": "/tmp/fake-source", "type": "local"} - data, _ := json.Marshal(meta) - if err := os.WriteFile(filepath.Join(skillDir, ".skillshare-meta.json"), data, 0644); err != nil { - t.Fatalf("failed to write meta: %v", err) + sourceDir := findSourceRoot(skillDir) + rel, _ := filepath.Rel(sourceDir, skillDir) + + store, err := install.LoadMetadata(sourceDir) + if err != nil { + t.Fatalf("writeProjectMeta: load: %v", err) + } + store.Set(rel, &install.MetadataEntry{ + Source: "/tmp/fake-source", + Type: "local", + }) + if err := store.Save(sourceDir); err != nil { + t.Fatalf("writeProjectMeta: save: %v", err) } } @@ -392,21 +399,21 @@ func TestUpdateProject_BatchAll_SubdirSkills_NoDuplication(t *testing.T) { skillsDir := filepath.Join(projectRoot, ".skillshare", "skills") repoURL := "file://" + remoteDir + store, _ := install.LoadMetadata(skillsDir) for _, name := range []string{"alpha", "beta"} { localDir := filepath.Join(skillsDir, name) os.MkdirAll(localDir, 0755) os.WriteFile(filepath.Join(localDir, "SKILL.md"), []byte("---\nname: "+name+"\n---\n# "+name+" v1"), 0644) - meta := map[string]any{ - "source": repoURL + "//skills/" + name, - "type": "git", - "repo_url": repoURL, - "subdir": "skills/" + name, - } - metaJSON, _ := json.Marshal(meta) - os.WriteFile(filepath.Join(localDir, ".skillshare-meta.json"), metaJSON, 0644) + store.Set(name, &install.MetadataEntry{ + Source: repoURL + "//skills/" + name, + Type: "git", + RepoURL: repoURL, + Subdir: "skills/" + name, + }) } + store.Save(skillsDir) // 3. First update --all result1 := sb.RunCLIInDir(projectRoot, "update", "--all", "-p", "--skip-audit") diff --git a/tests/integration/update_prune_test.go b/tests/integration/update_prune_test.go index cd1dbba4..f962cc35 100644 --- a/tests/integration/update_prune_test.go +++ b/tests/integration/update_prune_test.go @@ -3,11 +3,11 @@ package integration import ( - "encoding/json" "os" "path/filepath" "testing" + "skillshare/internal/install" "skillshare/internal/testutil" ) @@ -289,18 +289,24 @@ func TestUpdate_Prune_NestedIntoSkill(t *testing.T) { } } -// writeMetaForRepo writes metadata matching a repo-installed skill. +// writeMetaForRepo writes metadata matching a repo-installed skill to the centralized store. func writeMetaForRepo(t *testing.T, skillDir, repoURL, subdir string) { t.Helper() - meta := map[string]any{ - "source": repoURL + "//" + subdir, - "type": "github", - "repo_url": repoURL, - "subdir": subdir, - "version": "abc123", + sourceDir := findSourceRoot(skillDir) + rel, _ := filepath.Rel(sourceDir, skillDir) + + store, err := install.LoadMetadata(sourceDir) + if err != nil { + t.Fatalf("writeMetaForRepo: load: %v", err) } - data, _ := json.Marshal(meta) - if err := os.WriteFile(filepath.Join(skillDir, ".skillshare-meta.json"), data, 0644); err != nil { - t.Fatalf("writeMetaForRepo: %v", err) + store.Set(rel, &install.MetadataEntry{ + Source: repoURL + "//" + subdir, + Type: "github", + RepoURL: repoURL, + Subdir: subdir, + Version: "abc123", + }) + if err := store.Save(sourceDir); err != nil { + t.Fatalf("writeMetaForRepo: save: %v", err) } } diff --git a/tests/integration/update_test.go b/tests/integration/update_test.go index 481408b9..da4dfe10 100644 --- a/tests/integration/update_test.go +++ b/tests/integration/update_test.go @@ -3,22 +3,47 @@ package integration import ( - "encoding/json" "os" "os/exec" "path/filepath" "testing" + "skillshare/internal/install" "skillshare/internal/testutil" ) -// writeMeta writes a minimal .skillshare-meta.json to make a skill updatable. +// writeMeta writes a minimal metadata entry to .metadata.json to make a skill updatable. +// It finds the source root ("skills" ancestor) and uses the relative path as the key. func writeMeta(t *testing.T, skillDir string) { t.Helper() - meta := map[string]any{"source": "/tmp/fake-source", "type": "local"} - data, _ := json.Marshal(meta) - if err := os.WriteFile(filepath.Join(skillDir, ".skillshare-meta.json"), data, 0644); err != nil { - t.Fatalf("failed to write meta: %v", err) + sourceDir := findSourceRoot(skillDir) + rel, _ := filepath.Rel(sourceDir, skillDir) + + store, err := install.LoadMetadata(sourceDir) + if err != nil { + t.Fatalf("failed to load metadata: %v", err) + } + store.Set(rel, &install.MetadataEntry{ + Source: "/tmp/fake-source", + Type: "local", + }) + if err := store.Save(sourceDir); err != nil { + t.Fatalf("failed to save metadata: %v", err) + } +} + +// findSourceRoot walks up from skillDir to find the "skills" ancestor directory. +func findSourceRoot(skillDir string) string { + dir := skillDir + for { + if filepath.Base(dir) == "skills" { + return dir + } + parent := filepath.Dir(dir) + if parent == dir { + return filepath.Dir(skillDir) + } + dir = parent } } From 424a9df7a4dd6d5a4887c49d8200861c54593d9f Mon Sep 17 00:00:00 2001 From: Willie Date: Wed, 8 Apr 2026 14:29:08 +0800 Subject: [PATCH 084/205] chore: remove debug test files and local metadata --- .skillshare/.gitignore | 2 -- .skillshare/skills/.metadata.json | 31 ----------------- tests/integration/debug_test.go | 55 ------------------------------- tests/integration/diag_test.go | 45 ------------------------- 4 files changed, 133 deletions(-) delete mode 100644 .skillshare/skills/.metadata.json delete mode 100644 tests/integration/debug_test.go delete mode 100644 tests/integration/diag_test.go diff --git a/.skillshare/.gitignore b/.skillshare/.gitignore index 20cd0e09..2b2bd66a 100644 --- a/.skillshare/.gitignore +++ b/.skillshare/.gitignore @@ -1,6 +1,4 @@ # BEGIN SKILLSHARE MANAGED - DO NOT EDIT logs/ skills/mdproof/ -skills/mygroup/keep-nested/ -skills/mygroup/stale-nested/ # END SKILLSHARE MANAGED diff --git a/.skillshare/skills/.metadata.json b/.skillshare/skills/.metadata.json deleted file mode 100644 index 5406dfe8..00000000 --- a/.skillshare/skills/.metadata.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "version": 1, - "entries": { - "keep-nested": { - "source": "file:///var/folders/8k/sbz6rywx269c6fxxddgnzf8h0000gn/T/nested-test3075288352/nested.git//skills/keep-nested", - "type": "git-https-subdir", - "group": "mygroup", - "installed_at": "2026-04-08T14:06:02.485057+08:00", - "repo_url": "file:///var/folders/8k/sbz6rywx269c6fxxddgnzf8h0000gn/T/nested-test3075288352/nested.git", - "subdir": "skills/keep-nested", - "version": "4f7bb1e", - "tree_hash": "25a55cd8bcf3f16b1bb9b22ac6088c784180df96", - "file_hashes": { - "SKILL.md": "sha256:1a1acd2305ec14b4a501dc93c1f6c008d60cdb0c8892b0e690e0e5e19846ee70" - } - }, - "stale-nested": { - "source": "file:///var/folders/8k/sbz6rywx269c6fxxddgnzf8h0000gn/T/nested-test3075288352/nested.git//skills/stale-nested", - "type": "git-https-subdir", - "group": "mygroup", - "installed_at": "2026-04-08T14:06:02.571038+08:00", - "repo_url": "file:///var/folders/8k/sbz6rywx269c6fxxddgnzf8h0000gn/T/nested-test3075288352/nested.git", - "subdir": "skills/stale-nested", - "version": "4f7bb1e", - "tree_hash": "57abb6088c17b6878b3547d4b79a755940990c3f", - "file_hashes": { - "SKILL.md": "sha256:c995325f2bd6ab9e90fdc62f79d955609ba2fcdbca97b9b52127eb573fdc80f9" - } - } - } -} diff --git a/tests/integration/debug_test.go b/tests/integration/debug_test.go deleted file mode 100644 index fe7a6b65..00000000 --- a/tests/integration/debug_test.go +++ /dev/null @@ -1,55 +0,0 @@ -//go:build !online - -package integration - -import ( - "fmt" - "os" - "path/filepath" - "skillshare/internal/install" - "skillshare/internal/testutil" - "testing" -) - -func TestDebug_NestedInstall(t *testing.T) { - sb := testutil.NewSandbox(t) - defer sb.Cleanup() - setupGlobalConfig(sb) - - remoteRepo := filepath.Join(sb.Root, "nested.git") - workClone := filepath.Join(sb.Root, "nested-work") - gitInit(t, remoteRepo, true) - gitClone(t, remoteRepo, workClone) - - for _, name := range []string{"keep-nested", "stale-nested"} { - os.MkdirAll(filepath.Join(workClone, "skills", name), 0755) - os.WriteFile(filepath.Join(workClone, "skills", name, "SKILL.md"), - []byte("---\nname: "+name+"\n---\n# "+name), 0644) - } - gitAddCommit(t, workClone, "add skills") - gitPush(t, workClone) - - for _, name := range []string{"keep-nested", "stale-nested"} { - r := sb.RunCLI("install", "file://"+remoteRepo+"//skills/"+name, "--into", "mygroup", "--skip-audit") - fmt.Printf("Install %s stdout: %s\n", name, r.Stdout) - fmt.Printf("Install %s stderr: %s\n", name, r.Stderr) - fmt.Printf("Install %s exit: %d\n", name, r.ExitCode) - r.AssertSuccess(t) - } - - fmt.Println("\n=== Files after install ===") - filepath.Walk(sb.SourcePath, func(path string, info os.FileInfo, err error) error { - if err == nil { - rel, _ := filepath.Rel(sb.SourcePath, path) - fmt.Println(" -", rel) - } - return nil - }) - - metaPath := filepath.Join(sb.SourcePath, ".metadata.json") - data, err := os.ReadFile(metaPath) - fmt.Printf("\n.metadata.json (%v): %s\n", err, data) - - store, loadErr := install.LoadMetadata(sb.SourcePath) - fmt.Printf("store entries (%v): %v\n", loadErr, store.List()) -} diff --git a/tests/integration/diag_test.go b/tests/integration/diag_test.go deleted file mode 100644 index e2e7a507..00000000 --- a/tests/integration/diag_test.go +++ /dev/null @@ -1,45 +0,0 @@ -//go:build !online - -package integration - -import ( - "os" - "path/filepath" - "testing" - - "skillshare/internal/install" - "skillshare/internal/testutil" -) - -func TestDiag_IntoMetadata(t *testing.T) { - sb := testutil.NewSandbox(t) - defer sb.Cleanup() - - sb.WriteConfig(`source: ` + sb.SourcePath + ` -targets: {} -`) - - // Create a local skill - localSkill := filepath.Join(sb.Root, "pdf-skill") - os.MkdirAll(localSkill, 0755) - os.WriteFile(filepath.Join(localSkill, "SKILL.md"), []byte("# PDF Skill"), 0644) - - // Install with --into frontend - result := sb.RunCLI("install", localSkill, "--into", "frontend") - _ = result - - // Read the raw content of .metadata.json - metaPath := filepath.Join(sb.SourcePath, ".metadata.json") - data, err := os.ReadFile(metaPath) - t.Logf("metadata.json read error: %v", err) - t.Logf("metadata.json content: %s", string(data)) - - // Also check the sidecar - sidecarPath := filepath.Join(sb.SourcePath, "frontend", "pdf-skill", ".skillshare-meta.json") - data2, err2 := os.ReadFile(sidecarPath) - t.Logf("sidecar read error: %v", err2) - t.Logf("sidecar content: %s", string(data2)) - - store, _ := install.LoadMetadata(sb.SourcePath) - t.Logf("store entries: %v", store.List()) -} From 34b6c2a9e06654038a0841deac9d6e9917d08215 Mon Sep 17 00:00:00 2001 From: Willie Date: Wed, 8 Apr 2026 14:38:25 +0800 Subject: [PATCH 085/205] test: fix remaining unit tests for centralized metadata store keys - Reconcile tests use bare name as store key (not group/name) - updatable_test uses MetadataStore directly instead of WriteMeta dual-write - Tracked repos marked with Tracked: true in store - All unit + integration tests pass (make check green) --- internal/config/project_reconcile_test.go | 6 +-- internal/config/reconcile_test.go | 6 +-- internal/install/updatable_test.go | 45 ++++++++++++++++++++--- 3 files changed, 46 insertions(+), 11 deletions(-) diff --git a/internal/config/project_reconcile_test.go b/internal/config/project_reconcile_test.go index 6648f635..809c391f 100644 --- a/internal/config/project_reconcile_test.go +++ b/internal/config/project_reconcile_test.go @@ -138,7 +138,7 @@ func TestReconcileProjectSkills_NestedSkillSetsGroup(t *testing.T) { Targets: []ProjectTargetEntry{{Name: "claude"}}, } store := install.NewMetadataStore() - store.Set("tools/my-skill", &install.MetadataEntry{ + store.Set("my-skill", &install.MetadataEntry{ Source: "github.com/user/repo", Group: "tools", }) @@ -147,9 +147,9 @@ func TestReconcileProjectSkills_NestedSkillSetsGroup(t *testing.T) { t.Fatalf("ReconcileProjectSkills failed: %v", err) } - entry := store.Get("tools/my-skill") + entry := store.Get("my-skill") if entry == nil { - t.Fatal("expected store to have 'tools/my-skill'") + t.Fatal("expected store to have 'my-skill'") } if entry.Group != "tools" { t.Errorf("expected group 'tools', got %q", entry.Group) diff --git a/internal/config/reconcile_test.go b/internal/config/reconcile_test.go index 13deedac..061db732 100644 --- a/internal/config/reconcile_test.go +++ b/internal/config/reconcile_test.go @@ -161,7 +161,7 @@ func TestReconcileGlobalSkills_NestedSkillSetsGroup(t *testing.T) { cfg := &Config{Source: sourceDir} store := install.NewMetadataStore() - store.Set("frontend/pdf", &install.MetadataEntry{ + store.Set("pdf", &install.MetadataEntry{ Source: "anthropics/skills/skills/pdf", Group: "frontend", }) @@ -170,9 +170,9 @@ func TestReconcileGlobalSkills_NestedSkillSetsGroup(t *testing.T) { t.Fatalf("ReconcileGlobalSkills failed: %v", err) } - entry := store.Get("frontend/pdf") + entry := store.Get("pdf") if entry == nil { - t.Fatal("expected store to have 'frontend/pdf'") + t.Fatal("expected store to have 'pdf'") } if entry.Group != "frontend" { t.Errorf("expected group 'frontend', got %q", entry.Group) diff --git a/internal/install/updatable_test.go b/internal/install/updatable_test.go index 2d8ca15b..c1a739c7 100644 --- a/internal/install/updatable_test.go +++ b/internal/install/updatable_test.go @@ -13,8 +13,29 @@ func createSkillWithMeta(t *testing.T, baseDir, name string, meta *SkillMeta) { os.MkdirAll(dir, 0755) os.WriteFile(filepath.Join(dir, "SKILL.md"), []byte("---\nname: "+name+"\n---\n"), 0644) if meta != nil { - if err := WriteMeta(dir, meta); err != nil { - t.Fatalf("write meta for %s: %v", name, err) + // Write to centralized .metadata.json + store, _ := LoadMetadata(baseDir) + key := name + group := "" + if idx := strings.LastIndex(name, "/"); idx >= 0 { + group = name[:idx] + key = name[idx+1:] + } + store.Set(key, &MetadataEntry{ + Source: meta.Source, + Kind: meta.Kind, + Type: meta.Type, + Group: group, + InstalledAt: meta.InstalledAt, + RepoURL: meta.RepoURL, + Subdir: meta.Subdir, + Version: meta.Version, + TreeHash: meta.TreeHash, + FileHashes: meta.FileHashes, + Branch: meta.Branch, + }) + if err := store.Save(baseDir); err != nil { + t.Fatalf("save metadata for %s: %v", name, err) } } } @@ -45,6 +66,12 @@ func TestGetUpdatableSkills_SkipsTrackedRepos(t *testing.T) { Source: "github.com/team/repo", Type: "github", }) + // Mark as tracked in the store + store, _ := LoadMetadata(src) + if e := store.Get("_team-repo"); e != nil { + e.Tracked = true + store.Save(src) + } // Also create a nested skill inside tracked repo nestedDir := filepath.Join(src, "_team-repo", "sub-skill") os.MkdirAll(nestedDir, 0755) @@ -86,7 +113,9 @@ func TestGetUpdatableSkills_Nested(t *testing.T) { nestedDir := filepath.Join(src, "group", "my-skill") os.MkdirAll(nestedDir, 0755) os.WriteFile(filepath.Join(nestedDir, "SKILL.md"), []byte("nested"), 0644) - WriteMeta(nestedDir, &SkillMeta{Source: "github.com/u/r", Type: "github"}) + store, _ := LoadMetadata(src) + store.Set("my-skill", &MetadataEntry{Source: "github.com/u/r", Type: "github", Group: "group"}) + store.Save(src) skills, err := GetUpdatableSkills(src) if err != nil { @@ -174,17 +203,20 @@ func TestFindRepoInstalls_MatchesByRepoURL(t *testing.T) { func TestFindRepoInstalls_MatchesNested(t *testing.T) { src := t.TempDir() + store, _ := LoadMetadata(src) // Skills under group/ for _, name := range []string{"scan", "learn", "archive"} { dir := filepath.Join(src, "feature-radar", name) os.MkdirAll(dir, 0755) os.WriteFile(filepath.Join(dir, "SKILL.md"), []byte("# "+name), 0644) - WriteMeta(dir, &SkillMeta{ + store.Set(name, &MetadataEntry{ Source: "https://github.com/runkids/feature-radar", Type: "github", RepoURL: "https://github.com/runkids/feature-radar.git", + Group: "feature-radar", }) } + store.Save(src) matches := FindRepoInstalls(src, "git@github.com:runkids/feature-radar.git") if len(matches) != 3 { @@ -213,10 +245,13 @@ func TestCheckCrossPathDuplicate_BlocksDifferentPath(t *testing.T) { // Existing install under group/ dir := filepath.Join(src, "my-group", "skill-a") os.MkdirAll(dir, 0755) - WriteMeta(dir, &SkillMeta{ + store, _ := LoadMetadata(src) + store.Set("skill-a", &MetadataEntry{ Source: "https://github.com/owner/repo", Type: "github", RepoURL: "https://github.com/owner/repo.git", + Group: "my-group", }) + store.Save(src) // Root install (no --into) should be blocked err := CheckCrossPathDuplicate(src, "https://github.com/owner/repo.git", "") From 2acfec82eca9a3dbeba23cd97edb42fe509e4577 Mon Sep 17 00:00:00 2001 From: Willie Date: Wed, 8 Apr 2026 15:00:22 +0800 Subject: [PATCH 086/205] =?UTF-8?q?refactor:=20simplify=20metadata=20?= =?UTF-8?q?=E2=80=94=20extract=20helpers,=20remove=20dual-write,=20add=20k?= =?UTF-8?q?ind=20constants?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Code quality fixes from review: - Extract RemoveByNames() to eliminate copy-paste in uninstall Phase 7 - Add MetadataKindSkill/MetadataKindAgent constants - Remove dual-write shim (findSkillsRoot + writeMetaToCentralized) - install_apply.go uses WriteMetaToStore with SourceDir (no sidecar) - Move LoadMetadata out of walk loops (P0 efficiency fix) - isGroupDir accepts store parameter to avoid redundant loads - Add SourceDir to InstallOptions for --into group support --- cmd/skillshare/check.go | 3 +- cmd/skillshare/install.go | 1 + cmd/skillshare/uninstall.go | 22 +----- cmd/skillshare/uninstall_project.go | 22 +----- cmd/skillshare/update.go | 4 +- cmd/skillshare/update_project.go | 2 +- cmd/skillshare/update_resolve.go | 7 +- cmd/skillshare/update_test.go | 4 +- internal/install/install.go | 1 + internal/install/install_apply.go | 23 ++++-- internal/install/install_local_test.go | 19 +++-- internal/install/meta.go | 66 ----------------- internal/install/metadata.go | 72 +++++++++++++++++++ internal/server/handler_install.go | 2 + .../search_batch_meta_roundtrip_test.go | 19 ++--- 15 files changed, 129 insertions(+), 138 deletions(-) diff --git a/cmd/skillshare/check.go b/cmd/skillshare/check.go index 0e28437b..70b210dd 100644 --- a/cmd/skillshare/check.go +++ b/cmd/skillshare/check.go @@ -654,13 +654,14 @@ func runCheckFiltered(sourceDir string, opts *checkOptions) error { resolveSpinner = ui.StartSpinner("Resolving skills...") } + checkStore, _ := install.LoadMetadata(sourceDir) var targets []updateTarget seen := map[string]bool{} var resolveWarnings []string for _, name := range opts.names { // Check group directory first (same logic as update) - if isGroupDir(name, sourceDir) { + if isGroupDir(name, sourceDir, checkStore) { groupMatches, groupErr := resolveGroupUpdatable(name, sourceDir) if groupErr != nil { resolveWarnings = append(resolveWarnings, fmt.Sprintf("%s: %v", name, groupErr)) diff --git a/cmd/skillshare/install.go b/cmd/skillshare/install.go index 27909185..d194b64b 100644 --- a/cmd/skillshare/install.go +++ b/cmd/skillshare/install.go @@ -402,6 +402,7 @@ func cmdInstall(args []string) error { return err } + parsed.opts.SourceDir = cfg.Source source, resolvedFromMeta, err := resolveInstallSource(parsed.sourceArg, parsed.opts, cfg) if err == nil && parsed.opts.Branch != "" { source.Branch = parsed.opts.Branch diff --git a/cmd/skillshare/uninstall.go b/cmd/skillshare/uninstall.go index 11ee4c91..7bce6614 100644 --- a/cmd/skillshare/uninstall.go +++ b/cmd/skillshare/uninstall.go @@ -1074,27 +1074,7 @@ func cmdUninstall(args []string) error { for _, t := range succeeded { removedNames[t.name] = true } - for _, name := range skillsStore.List() { - // Direct match by bare name or full path (group/name) - entry := skillsStore.Get(name) - fullName := name - if entry != nil && entry.Group != "" { - fullName = entry.Group + "/" + name - } - if removedNames[name] || removedNames[fullName] { - skillsStore.Remove(name) - continue - } - // When a group directory is uninstalled, also remove its member skills by group field - if entry != nil && entry.Group != "" { - for rn := range removedNames { - if entry.Group == rn || strings.HasPrefix(entry.Group, rn+"/") { - skillsStore.Remove(name) - break - } - } - } - } + skillsStore.RemoveByNames(removedNames) if saveErr := skillsStore.Save(cfg.Source); saveErr != nil { ui.Warning("Failed to update metadata after uninstall: %v", saveErr) } diff --git a/cmd/skillshare/uninstall_project.go b/cmd/skillshare/uninstall_project.go index 72c98be7..819e2da5 100644 --- a/cmd/skillshare/uninstall_project.go +++ b/cmd/skillshare/uninstall_project.go @@ -420,27 +420,7 @@ func cmdUninstallProject(args []string, root string) error { for _, t := range succeeded { removedNames[t.name] = true } - for _, name := range skillsStore.List() { - // Direct match by bare name or full path (group/name) - entry := skillsStore.Get(name) - fullName := name - if entry != nil && entry.Group != "" { - fullName = entry.Group + "/" + name - } - if removedNames[name] || removedNames[fullName] { - skillsStore.Remove(name) - continue - } - // When a group directory is uninstalled, also remove its member skills by group field - if entry != nil && entry.Group != "" { - for rn := range removedNames { - if entry.Group == rn || strings.HasPrefix(entry.Group, rn+"/") { - skillsStore.Remove(name) - break - } - } - } - } + skillsStore.RemoveByNames(removedNames) if saveErr := skillsStore.Save(sourceDir); saveErr != nil { ui.Warning("Failed to update metadata after uninstall: %v", saveErr) } diff --git a/cmd/skillshare/update.go b/cmd/skillshare/update.go index 6f8ffcce..d8f91018 100644 --- a/cmd/skillshare/update.go +++ b/cmd/skillshare/update.go @@ -260,6 +260,8 @@ func cmdUpdate(args []string) error { return fmt.Errorf("failed to scan skills: %w", err) } } else { + // Load store once for name resolution + nameStore, _ := install.LoadMetadata(cfg.Source) // Resolve by specific names/groups for _, name := range opts.names { // Glob pattern matching (e.g. "core-*", "_team-?") @@ -283,7 +285,7 @@ func cmdUpdate(args []string) error { continue } - if isGroupDir(name, cfg.Source) { + if isGroupDir(name, cfg.Source, nameStore) { groupMatches, groupErr := resolveGroupUpdatable(name, cfg.Source) if groupErr != nil { resolveWarnings = append(resolveWarnings, fmt.Sprintf("%s: %v", name, groupErr)) diff --git a/cmd/skillshare/update_project.go b/cmd/skillshare/update_project.go index 2a47c9d3..6def116e 100644 --- a/cmd/skillshare/update_project.go +++ b/cmd/skillshare/update_project.go @@ -65,7 +65,7 @@ func cmdUpdateProjectBatch(sourcePath string, opts *updateOptions, projectRoot s // Check group directory first (before repo/skill lookup, // so "feature-radar" expands to all skills rather than // matching a single nested "feature-radar/feature-radar"). - if isGroupDir(name, sourcePath) { + if isGroupDir(name, sourcePath, metaStore) { groupMatches, groupErr := resolveGroupUpdatable(name, sourcePath) if groupErr != nil { resolveWarnings = append(resolveWarnings, fmt.Sprintf("%s: %v", name, groupErr)) diff --git a/cmd/skillshare/update_resolve.go b/cmd/skillshare/update_resolve.go index 3616b9a6..b0d7898a 100644 --- a/cmd/skillshare/update_resolve.go +++ b/cmd/skillshare/update_resolve.go @@ -99,6 +99,9 @@ func resolveGroupUpdatable(group, sourceDir string) ([]updateTarget, error) { return nil, fmt.Errorf("group '%s' resolves outside source directory", group) } + // Load store once before walk (not per iteration) + store, _ := install.LoadMetadata(resolvedSourceDir) + var matches []updateTarget if walkErr := filepath.Walk(walkRoot, func(path string, fi os.FileInfo, err error) error { if err != nil { @@ -123,7 +126,6 @@ func resolveGroupUpdatable(group, sourceDir string) ([]updateTarget, error) { } // Skill with metadata (centralized store) - store, _ := install.LoadMetadata(resolvedSourceDir) if entry := store.GetByPath(rel); entry != nil && entry.Source != "" { matches = append(matches, updateTarget{name: rel, path: path, isRepo: false, meta: entry}) return filepath.SkipDir @@ -140,7 +142,7 @@ func resolveGroupUpdatable(group, sourceDir string) ([]updateTarget, error) { // isGroupDir checks if a name corresponds to a group directory (a container // for other skills). Returns false for tracked repos, skills with metadata, // and directories that are themselves a skill (have SKILL.md). -func isGroupDir(name, sourceDir string) bool { +func isGroupDir(name, sourceDir string, store *install.MetadataStore) bool { path := filepath.Join(sourceDir, name) info, err := os.Stat(path) if err != nil || !info.IsDir() { @@ -151,7 +153,6 @@ func isGroupDir(name, sourceDir string) bool { return false } // Not a skill with metadata - store, _ := install.LoadMetadata(sourceDir) if entry := store.Get(name); entry != nil && entry.Source != "" { return false } diff --git a/cmd/skillshare/update_test.go b/cmd/skillshare/update_test.go index 9e0c518c..e62d0bf4 100644 --- a/cmd/skillshare/update_test.go +++ b/cmd/skillshare/update_test.go @@ -90,7 +90,9 @@ func setupUpdatableSkill(t *testing.T, sourceDir, name string) { dir := filepath.Join(sourceDir, name) os.MkdirAll(dir, 0755) os.WriteFile(filepath.Join(dir, "SKILL.md"), []byte("# "+name), 0644) - install.WriteMeta(dir, &install.SkillMeta{Source: "github.com/test/" + name, Type: "github"}) + store, _ := install.LoadMetadata(sourceDir) + store.Set(name, &install.MetadataEntry{Source: "github.com/test/" + name, Type: "github"}) + store.Save(sourceDir) } func TestResolveByGlob_MatchesTrackedRepos(t *testing.T) { diff --git a/internal/install/install.go b/internal/install/install.go index 098f2a19..72efbcbd 100644 --- a/internal/install/install.go +++ b/internal/install/install.go @@ -28,6 +28,7 @@ type InstallOptions struct { AuditProjectRoot string // Project root for project-mode audit rule resolution Quiet bool // Suppress per-skill output in InstallFromConfig Branch string // Git branch to clone from (empty = remote default) + SourceDir string // Skills root dir for centralized metadata (set by caller) } // IsAgentMode returns true if explicitly installing agents. diff --git a/internal/install/install_apply.go b/internal/install/install_apply.go index 805b7e80..5f14031a 100644 --- a/internal/install/install_apply.go +++ b/internal/install/install_apply.go @@ -5,6 +5,7 @@ import ( "fmt" "os" "path/filepath" + "strings" "skillshare/internal/utils" ) @@ -26,6 +27,20 @@ func buildDiscoverySkillSource(source *Source, skillPath string) string { } func installImpl(source *Source, destPath string, opts InstallOptions) (*InstallResult, error) { + // Derive SourceDir from destPath if not set by caller. + // destPath = sourceDir[/into]/skillName, so strip Into + skillName. + if opts.SourceDir == "" { + dir := filepath.Dir(destPath) + if opts.Into != "" { + // Strip the --into prefix from the parent + dir = filepath.Dir(dir) + for i := strings.Count(opts.Into, "/"); i > 0; i-- { + dir = filepath.Dir(dir) + } + } + opts.SourceDir = dir + } + result := &InstallResult{ SkillName: source.Name, Source: source.Raw, @@ -102,7 +117,7 @@ func installFromLocal(source *Source, destPath string, result *InstallResult, op if hashes, hashErr := ComputeFileHashes(destPath); hashErr == nil { meta.FileHashes = hashes } - if err := WriteMeta(destPath, meta); err != nil { + if err := WriteMetaToStore(opts.SourceDir, destPath, meta); err != nil { result.Warnings = append(result.Warnings, fmt.Sprintf("failed to write metadata: %v", err)) } @@ -144,7 +159,7 @@ func installFromGit(source *Source, destPath string, result *InstallResult, opts if hashes, hashErr := ComputeFileHashes(destPath); hashErr == nil { meta.FileHashes = hashes } - if err := WriteMeta(destPath, meta); err != nil { + if err := WriteMetaToStore(opts.SourceDir, destPath, meta); err != nil { result.Warnings = append(result.Warnings, fmt.Sprintf("failed to write metadata: %v", err)) } @@ -257,7 +272,7 @@ func installFromDiscoveryImpl(discovery *DiscoveryResult, skill SkillInfo, destP if hashes, hashErr := ComputeFileHashes(destPath); hashErr == nil { meta.FileHashes = hashes } - if err := WriteMeta(destPath, meta); err != nil { + if err := WriteMetaToStore(opts.SourceDir, destPath, meta); err != nil { result.Warnings = append(result.Warnings, fmt.Sprintf("failed to write metadata: %v", err)) } @@ -366,7 +381,7 @@ func installFromGitSubdir(source *Source, destPath string, result *InstallResult if hashes, hashErr := ComputeFileHashes(destPath); hashErr == nil { meta.FileHashes = hashes } - if err := WriteMeta(destPath, meta); err != nil { + if err := WriteMetaToStore(opts.SourceDir, destPath, meta); err != nil { result.Warnings = append(result.Warnings, fmt.Sprintf("failed to write metadata: %v", err)) } diff --git a/internal/install/install_local_test.go b/internal/install/install_local_test.go index fb9594b1..f46dc58d 100644 --- a/internal/install/install_local_test.go +++ b/internal/install/install_local_test.go @@ -46,8 +46,9 @@ func TestInstall_LocalPath_Basic(t *testing.T) { t.Error("expected SKILL.md to exist in destination") } - // Verify metadata was written - if !HasMeta(destDir) { + // Verify metadata was written to centralized store + store, _ := LoadMetadata(filepath.Dir(destDir)) + if !store.Has(filepath.Base(destDir)) { t.Error("expected metadata to be written") } } @@ -162,17 +163,15 @@ func TestInstall_LocalPath_WritesFileHashes(t *testing.T) { t.Fatal(err) } - meta, err := ReadMeta(destDir) - if err != nil { - t.Fatal(err) - } - if meta == nil { + store, _ := LoadMetadata(filepath.Dir(destDir)) + entry := store.Get(filepath.Base(destDir)) + if entry == nil { t.Fatal("expected meta to exist") } - if len(meta.FileHashes) < 2 { - t.Errorf("expected at least 2 file hashes (SKILL.md + helpers.sh), got %d", len(meta.FileHashes)) + if len(entry.FileHashes) < 2 { + t.Errorf("expected at least 2 file hashes (SKILL.md + helpers.sh), got %d", len(entry.FileHashes)) } - for _, hash := range meta.FileHashes { + for _, hash := range entry.FileHashes { if len(hash) < 7 || hash[:7] != "sha256:" { t.Errorf("expected sha256: prefixed hash, got %q", hash) } diff --git a/internal/install/meta.go b/internal/install/meta.go index 3fdc37bc..1cc3bf8c 100644 --- a/internal/install/meta.go +++ b/internal/install/meta.go @@ -38,9 +38,6 @@ func (m *SkillMeta) EffectiveKind() string { // Deprecated: WriteMeta writes per-skill sidecar files. // New code should use MetadataStore.Set() + MetadataStore.Save() instead. -// -// For backward compatibility during migration, WriteMeta also writes to the -// centralized .metadata.json in the source root directory. func WriteMeta(skillPath string, meta *SkillMeta) error { metaPath := filepath.Join(skillPath, MetaFileName) @@ -53,72 +50,9 @@ func WriteMeta(skillPath string, meta *SkillMeta) error { return fmt.Errorf("failed to write metadata: %w", err) } - // Dual-write: also update centralized .metadata.json - writeMetaToCentralized(skillPath, meta) - return nil } -// writeMetaToCentralized writes an entry to the centralized .metadata.json store. -// Best-effort: errors are silently ignored since the sidecar is the primary write. -func writeMetaToCentralized(skillPath string, meta *SkillMeta) { - sourceDir := findSkillsRoot(skillPath) - if sourceDir == "" { - return - } - rel, err := filepath.Rel(sourceDir, skillPath) - if err != nil || rel == "." || strings.HasPrefix(rel, "..") { - return - } - - store, loadErr := LoadMetadata(sourceDir) - if loadErr != nil { - return - } - - // Split rel path into group + name (e.g., "frontend/pdf-skill" → group="frontend", name="pdf-skill") - rel = filepath.ToSlash(rel) - name := rel - group := "" - if idx := strings.LastIndex(rel, "/"); idx >= 0 { - group = rel[:idx] - name = rel[idx+1:] - } - - entry := &MetadataEntry{ - Source: meta.Source, - Kind: meta.Kind, - Type: meta.Type, - Group: group, - InstalledAt: meta.InstalledAt, - RepoURL: meta.RepoURL, - Subdir: meta.Subdir, - Version: meta.Version, - TreeHash: meta.TreeHash, - FileHashes: meta.FileHashes, - Branch: meta.Branch, - } - store.Set(name, entry) - _ = store.Save(sourceDir) -} - -// findSkillsRoot walks up from skillPath to find the ancestor directory named "skills". -// Returns "" if not found. -func findSkillsRoot(skillPath string) string { - dir := filepath.Dir(skillPath) // start from parent - for { - base := filepath.Base(dir) - if base == "skills" { - return dir - } - parent := filepath.Dir(dir) - if parent == dir { - return "" - } - dir = parent - } -} - // Deprecated: ReadMeta reads per-skill sidecar files. // New code should use LoadMetadata() + MetadataStore.Get() instead. func ReadMeta(skillPath string) (*SkillMeta, error) { diff --git a/internal/install/metadata.go b/internal/install/metadata.go index 0e94f8c0..7a586a8f 100644 --- a/internal/install/metadata.go +++ b/internal/install/metadata.go @@ -13,6 +13,12 @@ import ( // MetadataFileName is the centralized metadata file stored in each directory. const MetadataFileName = ".metadata.json" +// Metadata kind constants for LoadMetadataWithMigration. +const ( + MetadataKindSkill = "" // default kind for skills directories + MetadataKindAgent = "agent" // kind for agents directories +) + // MetadataStore holds all entries for a single directory (skills/ or agents/). type MetadataStore struct { Version int `json:"version"` @@ -115,6 +121,72 @@ func (e *MetadataEntry) FullName() string { return e.Name } +// RemoveByNames removes entries matching the given names, including group members. +// Handles direct key matches, full-path matches (group/name), and group membership. +func (s *MetadataStore) RemoveByNames(names map[string]bool) { + for _, name := range s.List() { + entry := s.Get(name) + fullName := name + if entry != nil && entry.Group != "" { + fullName = entry.Group + "/" + name + } + if names[name] || names[fullName] { + s.Remove(name) + continue + } + // Group directory uninstall: remove member skills + if entry != nil && entry.Group != "" { + for rn := range names { + if entry.Group == rn || strings.HasPrefix(entry.Group, rn+"/") { + s.Remove(name) + break + } + } + } + } +} + +// WriteMetaToStore writes a SkillMeta to the centralized .metadata.json store. +// sourceDir is the skills root (if empty, defaults to parent of destPath). +// destPath is the installed skill path. +func WriteMetaToStore(sourceDir, destPath string, meta *SkillMeta) error { + if sourceDir == "" { + sourceDir = filepath.Dir(destPath) + } + rel, err := filepath.Rel(sourceDir, destPath) + if err != nil { + return fmt.Errorf("relative path: %w", err) + } + rel = filepath.ToSlash(rel) + + name := rel + group := "" + if idx := strings.LastIndex(rel, "/"); idx >= 0 { + group = rel[:idx] + name = rel[idx+1:] + } + + store, loadErr := LoadMetadata(sourceDir) + if loadErr != nil { + store = NewMetadataStore() + } + + store.Set(name, &MetadataEntry{ + Source: meta.Source, + Kind: meta.Kind, + Type: meta.Type, + Group: group, + InstalledAt: meta.InstalledAt, + RepoURL: meta.RepoURL, + Subdir: meta.Subdir, + Version: meta.Version, + TreeHash: meta.TreeHash, + FileHashes: meta.FileHashes, + Branch: meta.Branch, + }) + return store.Save(sourceDir) +} + // LoadMetadata reads .metadata.json from the given directory. // Returns an empty store (version 1) if the file does not exist. func LoadMetadata(dir string) (*MetadataStore, error) { diff --git a/internal/server/handler_install.go b/internal/server/handler_install.go index a23e0483..0c247c9a 100644 --- a/internal/server/handler_install.go +++ b/internal/server/handler_install.go @@ -151,6 +151,7 @@ func (s *Server) handleInstallBatch(w http.ResponseWriter, r *http.Request) { SkipAudit: body.SkipAudit, AuditThreshold: s.auditThreshold(), Branch: body.Branch, + SourceDir: s.cfg.Source, } if s.IsProjectMode() { installOpts.AuditProjectRoot = s.projectRoot @@ -327,6 +328,7 @@ func (s *Server) handleInstall(w http.ResponseWriter, r *http.Request) { Into: body.Into, Branch: body.Branch, AuditThreshold: s.auditThreshold(), + SourceDir: s.cfg.Source, } if s.IsProjectMode() { installOpts.AuditProjectRoot = s.projectRoot diff --git a/tests/integration/search_batch_meta_roundtrip_test.go b/tests/integration/search_batch_meta_roundtrip_test.go index 5ea8419a..22084321 100644 --- a/tests/integration/search_batch_meta_roundtrip_test.go +++ b/tests/integration/search_batch_meta_roundtrip_test.go @@ -79,19 +79,20 @@ func TestSearchBatchGroupedInstall_MetadataSourceParseRoundTrip(t *testing.T) { } } + store, storeErr := install.LoadMetadata(sb.SourcePath) + if storeErr != nil { + t.Fatalf("load metadata: %v", storeErr) + } + for _, name := range []string{"alpha-skill", "beta-skill"} { - skillPath := filepath.Join(sb.SourcePath, name) - meta, err := install.ReadMeta(skillPath) - if err != nil { - t.Fatalf("read meta for %s: %v", name, err) - } - if meta == nil { + entry := store.Get(name) + if entry == nil { t.Fatalf("meta missing for %s", name) } - parsed, err := install.ParseSource(meta.Source) + parsed, err := install.ParseSource(entry.Source) if err != nil { - t.Fatalf("meta source for %s is not parseable: %q (%v)", name, meta.Source, err) + t.Fatalf("meta source for %s is not parseable: %q (%v)", name, entry.Source, err) } if parsed.CloneURL != "https://gitlab.com/team/monorepo.git" { t.Fatalf("unexpected clone URL for %s: got %q", name, parsed.CloneURL) @@ -99,7 +100,7 @@ func TestSearchBatchGroupedInstall_MetadataSourceParseRoundTrip(t *testing.T) { wantSubdir := "skills/" + name if parsed.Subdir != wantSubdir { - t.Fatalf("unexpected subdir for %s: got %q, want %q (source=%q)", name, parsed.Subdir, wantSubdir, meta.Source) + t.Fatalf("unexpected subdir for %s: got %q, want %q (source=%q)", name, parsed.Subdir, wantSubdir, entry.Source) } } } From 9025fb35bf48e7dc148088502c245cd8359d4703 Mon Sep 17 00:00:00 2001 From: Willie Date: Wed, 8 Apr 2026 15:12:56 +0800 Subject: [PATCH 087/205] docs: update website docs for centralized .metadata.json format --- .../docs/getting-started/quick-reference.md | 2 +- website/docs/how-to/advanced/migration.md | 6 +- website/docs/how-to/advanced/security.md | 2 +- .../docs/reference/appendix/file-structure.md | 58 +++++++---------- website/docs/reference/appendix/index.md | 2 +- website/docs/reference/commands/doctor.md | 4 +- website/docs/reference/commands/update.md | 4 +- .../docs/reference/targets/configuration.md | 28 ++++++--- website/docs/understand/agents.md | 2 +- website/docs/understand/audit-engine.md | 6 +- .../docs/understand/declarative-manifest.md | 62 +++++++++++-------- website/docs/understand/project-skills.md | 38 +++++++----- website/docs/understand/skill-format.md | 24 ++++--- .../docs/understand/tracked-repositories.md | 20 +++--- 14 files changed, 137 insertions(+), 121 deletions(-) diff --git a/website/docs/getting-started/quick-reference.md b/website/docs/getting-started/quick-reference.md index 17786ede..87a41644 100644 --- a/website/docs/getting-started/quick-reference.md +++ b/website/docs/getting-started/quick-reference.md @@ -176,7 +176,7 @@ make playground-down # stop container | Path | Description | |------|-------------| | `~/.config/skillshare/config.yaml` | Configuration file | -| `~/.config/skillshare/skills/registry.yaml` | Installed skill registry (auto-managed) | +| `~/.config/skillshare/skills/.metadata.json` | Installed skill metadata (auto-managed) | | `~/.config/skillshare/skills/` | Skill source directory | | `~/.config/skillshare/agents/` | Agent source directory | | `~/.config/skillshare/extras//` | Extras source directories | diff --git a/website/docs/how-to/advanced/migration.md b/website/docs/how-to/advanced/migration.md index f6f3c9b4..b287efd0 100644 --- a/website/docs/how-to/advanced/migration.md +++ b/website/docs/how-to/advanced/migration.md @@ -67,10 +67,8 @@ skillshare collect --all **Option B: Reinstall from source** (gets latest versions) ```bash -# Find original sources from skill metadata -cat ~/.config/skillshare/skills/pdf/.skillshare-meta.json -# or check the registry -cat ~/.config/skillshare/skills/registry.yaml +# Check the metadata +cat ~/.config/skillshare/skills/.metadata.json # Reinstall skillshare install anthropics/skills/skills/pdf diff --git a/website/docs/how-to/advanced/security.md b/website/docs/how-to/advanced/security.md index 7d5bf098..c767c8e5 100644 --- a/website/docs/how-to/advanced/security.md +++ b/website/docs/how-to/advanced/security.md @@ -46,7 +46,7 @@ flowchart TD I1["skillshare install <source>"] --> I2{"Install mode"} I2 -- "Regular skill" --> I3{"Audit scan"} I3 -- "At/above threshold" --> I4["Blocked (unless --force) ✗"] - I3 -- "Pass / --force" --> I5["Write .skillshare-meta.json
(sha256 per file)"] + I3 -- "Pass / --force" --> I5["Record in .metadata.json
(sha256 per file)"] I5 --> I6["Installed skill ✓"] I2 -- "Tracked repo (--track)" --> I7["Clone repo with .git"] I7 --> I8{"Audit full repo
(same threshold)"} diff --git a/website/docs/reference/appendix/file-structure.md b/website/docs/reference/appendix/file-structure.md index 1751ace1..c9eff76c 100644 --- a/website/docs/reference/appendix/file-structure.md +++ b/website/docs/reference/appendix/file-structure.md @@ -12,11 +12,10 @@ Directory layout and file locations for skillshare. ~/.config/skillshare/ # XDG_CONFIG_HOME ├── config.yaml # Configuration file ├── audit-rules.yaml # Custom audit rules (optional) -├── skills/ # Source directory (skills + registry) -│ ├── registry.yaml # Installed skill registry (auto-managed) +├── skills/ # Source directory (skills + metadata) +│ ├── .metadata.json # Installed skill metadata (auto-managed) │ ├── my-skill/ # Regular skill │ │ ├── SKILL.md # Skill definition (required) -│ │ └── .skillshare-meta.json # Install metadata (auto-generated) │ ├── code-review/ # Another skill │ │ └── SKILL.md │ └── _team-skills/ # Tracked repository @@ -99,29 +98,32 @@ See [Configuration](/docs/reference/targets/configuration) for full reference. --- -## Registry File +## Metadata File ### Location ``` -~/.config/skillshare/skills/registry.yaml +~/.config/skillshare/skills/.metadata.json ``` Stores metadata about installed and tracked skills. Lives inside the source directory so it can be synced via git for multi-machine setups. **Auto-managed** by `install`, `uninstall`, and `update` — don't edit manually. -:::note Migration -In v0.19+, `registry.yaml` moved from the config directory to the source directory. The migration is automatic on first run — no manual action needed. -::: - ### Contents -```yaml -# yaml-language-server: $schema=https://raw.githubusercontent.com/runkids/skillshare/main/schemas/registry.schema.json -skills: - - name: pdf - source: anthropics/skills/skills/pdf - - name: _team-skills - source: github.com/team/skills +```json +{ + "skills": [ + { + "name": "pdf", + "source": "anthropics/skills/skills/pdf" + }, + { + "name": "_team-skills", + "source": "github.com/team/skills", + "tracked": true + } + ] +} ``` Each entry records the skill name and its install source. Tracked repos (prefixed with `_`) include the full repository URL for `update` and `check` operations. @@ -145,9 +147,9 @@ Each entry records the skill name and its install source. Tracked repos (prefixe ``` skills/ +├── .metadata.json # Centralized skill metadata (auto-managed) ├── skill-name/ # Skill directory │ ├── SKILL.md # Required: skill definition -│ ├── .skillshare-meta.json # Optional: install metadata │ ├── examples/ # Optional: example files │ └── templates/ # Optional: code templates ├── frontend/ # Category folder (via --into or manual) @@ -224,22 +226,6 @@ This file should **not** be committed to version control — add it to `.gitigno When active, `sync -v`, `status`, and `doctor` display a `.local active` indicator. -### .skillshare-meta.json (Auto-generated) - -Metadata about where the skill was installed from: - -```json -{ - "source": "github.com/org/repo/path/to/skill", - "type": "github", - "installed_at": "2026-01-20T15:30:00Z", - "repo_url": "https://github.com/org/repo.git", - "subdir": "path/to/skill", - "version": "abc1234" -} -``` - -**Don't edit this file manually.** It's used by `skillshare update` and `skillshare check`. --- @@ -393,7 +379,7 @@ See [Environment Variables](./environment-variables.md#xdg_config_home) for deta | Item | Path | |------|------| | Config | `~/.config/skillshare/config.yaml` | -| Registry | `~/.config/skillshare/skills/registry.yaml` | +| Metadata | `~/.config/skillshare/skills/.metadata.json` | | Source | `~/.config/skillshare/skills/` | | Backups | `~/.local/share/skillshare/backups/` | | Trash | `~/.local/share/skillshare/trash/` | @@ -407,7 +393,7 @@ See [Environment Variables](./environment-variables.md#xdg_config_home) for deta | Item | Path | |------|------| | Config | `%AppData%\skillshare\config.yaml` | -| Registry | `%AppData%\skillshare\skills\registry.yaml` | +| Metadata | `%AppData%\skillshare\skills\.metadata.json` | | Source | `%AppData%\skillshare\skills\` | | Backups | `%AppData%\skillshare\backups\` | | Trash | `%AppData%\skillshare\trash\` | @@ -422,7 +408,7 @@ skillshare follows the [XDG Base Directory Specification](https://specifications | XDG Variable | Default Path | skillshare Uses For | |-------------|-------------|---------------------| -| `XDG_CONFIG_HOME` | `~/.config` | `skillshare/config.yaml`, `skillshare/skills/` (includes `registry.yaml`) | +| `XDG_CONFIG_HOME` | `~/.config` | `skillshare/config.yaml`, `skillshare/skills/` (includes `.metadata.json`) | | `XDG_DATA_HOME` | `~/.local/share` | `skillshare/backups/`, `skillshare/trash/` | | `XDG_STATE_HOME` | `~/.local/state` | `skillshare/logs/` | | `XDG_CACHE_HOME` | `~/.cache` | `skillshare/ui/` (downloaded web dashboard) | diff --git a/website/docs/reference/appendix/index.md b/website/docs/reference/appendix/index.md index 06ef0586..0ab75795 100644 --- a/website/docs/reference/appendix/index.md +++ b/website/docs/reference/appendix/index.md @@ -24,7 +24,7 @@ Technical reference appendix for skillshare. |------|---------| | `~/.config/skillshare/config.yaml` | Configuration file | | `~/.config/skillshare/skills/` | Source directory (your skills) | -| `~/.config/skillshare/skills/registry.yaml` | Installed skill registry (auto-managed) | +| `~/.config/skillshare/skills/.metadata.json` | Installed skill metadata (auto-managed) | | `~/.local/share/skillshare/backups/` | Backup directory | | `~/.local/share/skillshare/trash/` | Soft-deleted skills | | `~/.local/state/skillshare/logs/` | Operation and audit logs | diff --git a/website/docs/reference/commands/doctor.md b/website/docs/reference/commands/doctor.md index 3352cc3b..60a36b4d 100644 --- a/website/docs/reference/commands/doctor.md +++ b/website/docs/reference/commands/doctor.md @@ -84,11 +84,11 @@ For each target: ### Skill Integrity -For tracked skills with `.skillshare-meta.json` file hashes, doctor verifies that no files have been tampered with since installation: +For installed skills with file hash metadata, doctor verifies that no files have been tampered with since installation: - Compares current SHA-256 hashes against stored hashes - Reports modified, missing, and added files per skill -- Local skills (no `.skillshare-meta.json`) are silently skipped — this is expected +- Local skills (not in `.metadata.json`) are silently skipped — this is expected - Installed skills with metadata but missing `file_hashes` are flagged with their names ```text diff --git a/website/docs/reference/commands/update.md b/website/docs/reference/commands/update.md index 813da9e5..33a42659 100644 --- a/website/docs/reference/commands/update.md +++ b/website/docs/reference/commands/update.md @@ -326,8 +326,8 @@ skillshare update --all -p --skip-audit # Skip security audit gate | Type | Method | Detected by | |------|--------|-------------| | **Tracked repo** (`_repo`) | `git pull` | Has `.git/` directory | -| **Remote skill** (with metadata) | Reinstall from source | Has `.skillshare-meta.json` | -| **Local skill** | Skipped | No metadata | +| **Remote skill** (with metadata) | Reinstall from source | Listed in `.metadata.json` | +| **Local skill** | Skipped | Not listed in `.metadata.json` | The `_` prefix is optional — `skillshare update team-skills -p` auto-detects `_team-skills`. diff --git a/website/docs/reference/targets/configuration.md b/website/docs/reference/targets/configuration.md index 344d9a84..32ecf1d8 100644 --- a/website/docs/reference/targets/configuration.md +++ b/website/docs/reference/targets/configuration.md @@ -11,8 +11,8 @@ Configuration file reference for skillshare. ```text ~/.config/skillshare/ ├── config.yaml ← Configuration file -├── registry.yaml ← Skill registry (auto-managed) ├── skills/ ← Source directory (your skills) +│ ├── .metadata.json ← Skill metadata (auto-managed) │ ├── my-skill/ │ ├── another/ │ └── _team-repo/ ← Tracked repository @@ -375,8 +375,8 @@ When you run `skillshare install` with no arguments, all listed skills that aren The `skills:` list is automatically updated after each `install` and `uninstall` operation. You don't need to edit it manually. -:::note Migrated to registry.yaml -Starting from v0.16.2, installed skill entries are stored in a separate `registry.yaml` file instead of inside `config.yaml`. Existing `skills:` entries in `config.yaml` are migrated automatically on first run. The schema and fields remain the same. +:::note Migrated to .metadata.json +Starting from v0.16.2, installed skill entries moved from `config.yaml` to a separate file. In the current version, all installation metadata is stored in a centralized `.metadata.json` inside the `skills/` directory. Migration from older formats (`registry.yaml`, per-skill `.skillshare-meta.json`) is automatic on first run. ::: ### `extras` {#extras} @@ -665,21 +665,29 @@ SKILLSHARE_CONFIG=~/custom-config.yaml skillshare status ## Skill Metadata -When you install a skill, skillshare creates a `.skillshare-meta.json` file: +When you install a skill, skillshare records its metadata in the centralized `.metadata.json` file: ```json { - "source": "anthropics/skills/skills/pdf", - "type": "github", - "installed_at": "2026-01-20T15:30:00Z", - "repo_url": "https://github.com/anthropics/skills.git", - "subdir": "skills/pdf", - "version": "abc1234" + "skills": [ + { + "name": "pdf", + "source": "anthropics/skills/skills/pdf", + "type": "github", + "installed_at": "2026-01-20T15:30:00Z", + "repo_url": "https://github.com/anthropics/skills.git", + "subdir": "skills/pdf", + "version": "abc1234" + } + ] } ``` +Each skill entry includes: + | Field | Description | |-------|-------------| +| `name` | Skill directory name | | `source` | Original install source input | | `type` | Source type (`github`, `local`, etc.) | | `installed_at` | Installation timestamp | diff --git a/website/docs/understand/agents.md b/website/docs/understand/agents.md index 13b01186..ac526d87 100644 --- a/website/docs/understand/agents.md +++ b/website/docs/understand/agents.md @@ -230,4 +230,4 @@ skillshare sync -p ``` Project agent source: `.skillshare/agents/` -Installed agents (tracked) get `.skillshare-meta.json` sidecar files and `.gitignore` entries, same as tracked skills. +Installed agents (tracked) are recorded in `.metadata.json` and `.gitignore` entries are created, same as tracked skills. diff --git a/website/docs/understand/audit-engine.md b/website/docs/understand/audit-engine.md index a97cc39e..4e432727 100644 --- a/website/docs/understand/audit-engine.md +++ b/website/docs/understand/audit-engine.md @@ -100,7 +100,7 @@ These patterns are **suspicious in context** — they may be legitimate but dese ### MEDIUM: Content Integrity -Skills installed or updated via `skillshare install` or `skillshare update` have their file hashes recorded in `.skillshare-meta.json`. On subsequent audits, the engine verifies content integrity: +Skills installed or updated via `skillshare install` or `skillshare update` have their file hashes recorded in `.metadata.json`. On subsequent audits, the engine verifies content integrity: | Pattern | Severity | Description | |---------|----------|------------| @@ -113,7 +113,7 @@ Skills installed or updated via `skillshare install` or `skillshare update` have ### MEDIUM: Metadata Trust Verification -The `metadata` analyzer cross-references SKILL.md metadata against the actual git source URL from `.skillshare-meta.json` to detect social-engineering patterns in the supply chain: +The `metadata` analyzer cross-references SKILL.md metadata against the actual git source URL from `.metadata.json` to detect social-engineering patterns in the supply chain: | Pattern | Severity | Description | |---------|----------|------------| @@ -393,7 +393,7 @@ When analyzability drops below **70%**, the audit engine emits an `INFO`-level f Files excluded from the calculation: - Binary files (images, `.wasm`, etc.) - Files exceeding 1 MB -- `.skillshare-meta.json` (internal metadata) +- `.metadata.json` (internal metadata) ### Output diff --git a/website/docs/understand/declarative-manifest.md b/website/docs/understand/declarative-manifest.md index d0dffb42..fd54c939 100644 --- a/website/docs/understand/declarative-manifest.md +++ b/website/docs/understand/declarative-manifest.md @@ -12,26 +12,33 @@ Use the declarative manifest when you want reproducible skill setups across mach ## What Is a Skill Manifest? -The `skills:` section in `registry.yaml` serves as a **portable declaration** of your skill collection. Instead of manually installing skills one by one, you list them in the registry and run `skillshare install` to bring everything up. - -```yaml -# ~/.config/skillshare/skills/registry.yaml (global) -# or .skillshare/registry.yaml (project) -skills: - - name: react-best-practices - source: anthropics/skills/skills/react-best-practices - group: frontend # installed into frontend/ - - name: _team-skills - source: my-org/shared-skills - tracked: true - group: devops # installed into devops/ - - name: commit - source: anthropics/skills/skills/commit # no group → root level +The `skills:` section in `.metadata.json` serves as a **portable declaration** of your skill collection. Instead of manually installing skills one by one, you list them in the metadata and run `skillshare install` to bring everything up. + +```json +{ + "skills": [ + { + "name": "react-best-practices", + "source": "anthropics/skills/skills/react-best-practices", + "group": "frontend" + }, + { + "name": "_team-skills", + "source": "my-org/shared-skills", + "tracked": true, + "group": "devops" + }, + { + "name": "commit", + "source": "anthropics/skills/skills/commit" + } + ] +} ``` :::note Migration notes -- **From config.yaml**: In older versions, `skills:` lived inside `config.yaml`. Skillshare automatically migrates it to `registry.yaml` on first load. -- **From config dir**: In v0.19+, `registry.yaml` moved from the config directory (`~/.config/skillshare/`) to the source directory (`~/.config/skillshare/skills/`) so it can be synced via git. The migration is automatic — no manual action required. +- **From config.yaml**: In older versions, `skills:` lived inside `config.yaml`. Skillshare automatically migrates it to `.metadata.json` on first load. +- **From registry.yaml**: In v0.19+, skill records moved from `registry.yaml` to `.metadata.json` as the unified metadata store. The migration is automatic — no manual action required. ::: ## How It Works @@ -41,10 +48,10 @@ skills: Running `skillshare install` with **no arguments** reads the manifest and installs all listed skills: ```bash -# Global mode — installs all skills from ~/.config/skillshare/skills/registry.yaml +# Global mode — installs all skills from ~/.config/skillshare/skills/.metadata.json skillshare install -# Project mode — installs from .skillshare/registry.yaml +# Project mode — installs from .skillshare/.metadata.json skillshare install -p # Preview without installing @@ -57,14 +64,14 @@ Skills that already exist are skipped automatically. The manifest stays in sync with your actual skill collection: -- **`skillshare install `** — adds the installed skill to `registry.yaml` automatically -- **`skillshare uninstall ...`** — removes the entry from `registry.yaml` automatically +- **`skillshare install `** — adds the installed skill to `.metadata.json` automatically +- **`skillshare uninstall ...`** — removes the entry from `.metadata.json` automatically You never need to edit the manifest manually (though you can). ## Skill Entry Fields -Each entry in the `skills:` list in `registry.yaml` has these fields: +Each entry in the `skills:` list in `.metadata.json` has these fields: | Field | Required | Description | |-------|----------|-------------| @@ -94,7 +101,7 @@ skillshare sync # distribute to all targets New team members get the same AI context in one command: ```bash -# .skillshare/registry.yaml is committed to the repo +# .skillshare/.metadata.json is committed to the repo git clone cd skillshare install -p # installs all declared skills @@ -106,8 +113,9 @@ skillshare sync -p # links to project targets Project maintainers declare recommended skills: ```yaml -# .skillshare/registry.yaml -skills: +# .skillshare/.metadata.json +{ + "skills": - name: react-best-practices source: anthropics/skills/skills/react-best-practices group: frontend @@ -122,7 +130,7 @@ When you install with `--into`, the group is recorded automatically: ```bash skillshare install anthropics/skills/skills/pdf --into frontend -# registry.yaml will contain: name: pdf, group: frontend +# .metadata.json will contain: name: pdf, group: frontend ``` Running `skillshare install` (no args) recreates the same directory structure from the manifest. @@ -133,7 +141,7 @@ Contributors clone and run `skillshare install -p` to get project-specific AI co ## Workflow Summary ``` -1. Install skills normally → registry.yaml auto-updates +1. Install skills normally → .metadata.json auto-updates 2. Push/pull config via git → portable across machines 3. Run `skillshare install` → reproduce on new machine 4. Run `skillshare sync` → distribute to all targets diff --git a/website/docs/understand/project-skills.md b/website/docs/understand/project-skills.md index 11a61e40..ced19554 100644 --- a/website/docs/understand/project-skills.md +++ b/website/docs/understand/project-skills.md @@ -20,7 +20,7 @@ Use project skills when your team needs repo-specific AI instructions (coding st | **Project tooling** | CI/CD deployment knowledge, testing patterns, migration scripts specific to this repo | | **Onboarding acceleration** | "How does auth work here?" — the AI already knows, from committed project skills | | **Open source projects** | Maintainers commit `.skillshare/` so contributors get project-specific AI context on clone | -| **Community skill curation** | A repo's `registry.yaml` serves as a curated skill list — anyone can `install -p` to get the same setup | +| **Community skill curation** | A repo's `.metadata.json` serves as a curated skill list — anyone can `install -p` to get the same setup | --- @@ -86,7 +86,7 @@ skillshare sync -g # Force global mode / ├── .skillshare/ │ ├── config.yaml # Targets + settings (incl. extras) -│ ├── registry.yaml # Remote skills list (auto-managed) +│ ├── .metadata.json # Remote skills list (auto-managed) │ ├── .gitignore # Ignores logs/, trash/, and cloned remote/tracked skill dirs │ ├── extras/ # Extras source directories │ │ └── rules/ # e.g. extras init rules --target .claude/rules -p @@ -95,12 +95,10 @@ skillshare sync -g # Force global mode │ ├── my-local-skill/ # Created manually or via `skillshare new` │ │ └── SKILL.md │ ├── remote-skill/ # Installed via `skillshare install -p` -│ │ ├── SKILL.md -│ │ └── .skillshare-meta.json +│ │ └── SKILL.md │ ├── tools/ # Category folder (via --into tools) │ │ └── pdf/ # Installed via `skillshare install ... --into tools -p` -│ │ ├── SKILL.md -│ │ └── .skillshare-meta.json +│ │ └── SKILL.md │ └── _team-skills/ # Installed via `skillshare install --track -p` │ ├── .git/ # Git history preserved │ ├── frontend/ui/ @@ -141,16 +139,22 @@ targets: - **Short**: Just the target name (e.g., `claude`). Uses known default path, merge mode. - **Long**: Object with `name`, optional `path`, optional `mode` (`merge`, `copy`, or `symlink`), and optional `include`/`exclude` filters. Supports relative paths (resolved from project root) and `~` expansion. -Remote skill installations are tracked in a separate file, `.skillshare/registry.yaml`: - -```yaml -# .skillshare/registry.yaml (auto-managed by install/uninstall) -skills: - - name: pdf-skill - source: anthropic/skills/pdf - - name: _team-skills - source: github.com/team/skills - tracked: true # Tracked repo: cloned with git history +Remote skill installations are tracked in a separate file, `.skillshare/.metadata.json`: + +```json +{ + "skills": [ + { + "name": "pdf-skill", + "source": "anthropic/skills/pdf" + }, + { + "name": "_team-skills", + "source": "github.com/team/skills", + "tracked": true + } + ] +} ``` **Skills** list tracks remote installations only. Local skills don't need entries here. @@ -158,7 +162,7 @@ skills: - `tracked: true`: Installed with `--track` (git repo with `.git/` preserved). When someone runs `skillshare install -p`, tracked skills are cloned with full git history so `skillshare update` works correctly. :::tip Portable Skill Manifest -`config.yaml` and `registry.yaml` together form a portable skill manifest in both global and project mode. In a project, commit them to git and anyone can run `skillshare install -p && skillshare sync`. For global mode, copy both files to a new machine and run `skillshare install && skillshare sync`. This works for teams, open source contributors, community templates, and dotfiles across machines. +`config.yaml` and `.metadata.json` together form a portable skill manifest in both global and project mode. In a project, commit them to git and anyone can run `skillshare install -p && skillshare sync`. For global mode, copy both files to a new machine and run `skillshare install && skillshare sync`. This works for teams, open source contributors, community templates, and dotfiles across machines. ::: --- diff --git a/website/docs/understand/skill-format.md b/website/docs/understand/skill-format.md index 589b784a..3d36c49c 100644 --- a/website/docs/understand/skill-format.md +++ b/website/docs/understand/skill-format.md @@ -261,23 +261,31 @@ Response: "The function looks correct but could benefit from type hints..." --- -## Skill Metadata File +## Centralized Metadata -When you install a skill, skillshare creates a `.skillshare-meta.json` file: +When you install a skill, skillshare records its metadata in `.metadata.json` (centralized for all skills): ```json { - "source": "anthropics/skills/skills/pdf", - "type": "github", - "installed_at": "2026-01-20T15:30:00Z", - "repo_url": "https://github.com/anthropics/skills.git", - "subdir": "skills/pdf", - "version": "abc1234" + "skills": [ + { + "name": "pdf", + "source": "anthropics/skills/skills/pdf", + "type": "github", + "installed_at": "2026-01-20T15:30:00Z", + "repo_url": "https://github.com/anthropics/skills.git", + "subdir": "skills/pdf", + "version": "abc1234" + } + ] } ``` +Each skill entry includes: + | Field | Description | |-------|-------------| +| `name` | Skill directory name | | `source` | Original install source input | | `type` | Source type (`github`, `local`, etc.) | | `installed_at` | Installation timestamp | diff --git a/website/docs/understand/tracked-repositories.md b/website/docs/understand/tracked-repositories.md index dc501836..67f1c618 100644 --- a/website/docs/understand/tracked-repositories.md +++ b/website/docs/understand/tracked-repositories.md @@ -154,14 +154,18 @@ skillshare uninstall _team-skills Tracked repos also work in project mode. The repo is cloned into `.skillshare/skills/` and added to `.skillshare/.gitignore` (so the tracked repo's git history doesn't conflict with your project's git). Project logs (`.skillshare/logs/`) and trash (`.skillshare/trash/`) are also ignored by default. -Installing a tracked repo auto-records `tracked: true` in `.skillshare/registry.yaml`, so new team members get the correct clone behavior via `skillshare install -p`: - -```yaml -# .skillshare/registry.yaml (auto-updated after install --track -p) -skills: - - name: _team-shared-skills - source: github.com/team/shared-skills - tracked: true +Installing a tracked repo auto-records `tracked: true` in `.skillshare/.metadata.json`, so new team members get the correct clone behavior via `skillshare install -p`: + +```json +{ + "skills": [ + { + "name": "_team-shared-skills", + "source": "github.com/team/shared-skills", + "tracked": true + } + ] +} ``` ```bash From 7290884977f0f66538abacb51f1206781d96e6f9 Mon Sep 17 00:00:00 2001 From: Willie Date: Wed, 8 Apr 2026 16:35:19 +0800 Subject: [PATCH 088/205] fix: use full-path metadata keys to prevent grouped skill collisions Metadata store entries for grouped skills (e.g. frontend/foo, backend/foo) were keyed by basename only, causing the second install to overwrite the first. This changes the key to the full relative path. Key changes: - WriteMetaToStore: use full rel path as key, clean up legacy basename key - Add KeyToRelPath() helper and MigrateLegacyKey() method for backward compat - Reconcile: migrate legacy basename keys to full-path keys on load - install_queries: guard against double-prefixing with KeyToRelPath - LoadMetadata: auto-run sidecar cleanup on every load (idempotent) - migrateAgentSidecars: recurse into subdirectories - mergeSkillSidecar: use full-path keys, detect tracked repos --- internal/config/project_reconcile.go | 16 ++- internal/config/project_reconcile_test.go | 9 +- internal/config/reconcile.go | 16 ++- internal/config/reconcile_test.go | 9 +- internal/install/install_queries.go | 13 +- internal/install/metadata.go | 87 +++++++++--- internal/install/metadata_migrate.go | 154 +++++++++++++++------- 7 files changed, 211 insertions(+), 93 deletions(-) diff --git a/internal/config/project_reconcile.go b/internal/config/project_reconcile.go index 3313de9f..18c1effd 100644 --- a/internal/config/project_reconcile.go +++ b/internal/config/project_reconcile.go @@ -53,20 +53,19 @@ func ReconcileProjectSkills(projectRoot string, projectCfg *ProjectConfig, store fullPath := filepath.ToSlash(relPath) - // Extract basename (key) and group from the relative path. - // The store uses basename as key and Group for the parent path. - name := fullPath + // Extract group from the relative path (e.g. "frontend/foo" → "frontend"). group := "" if idx := strings.LastIndex(fullPath, "/"); idx >= 0 { group = fullPath[:idx] - name = fullPath[idx+1:] } // Determine source and tracked status var source string tracked := isGitRepo(path) - existing := store.Get(name) + // Look up using GetByPath which handles both full-path keys and + // legacy basename keys (for backward compatibility during migration). + existing := store.GetByPath(fullPath) if existing != nil && existing.Source != "" { source = existing.Source } else if tracked { @@ -78,7 +77,7 @@ func ReconcileProjectSkills(projectRoot string, projectCfg *ProjectConfig, store return nil } - live[name] = true + live[fullPath] = true // Determine branch: from store entry or git (tracked repos) var branch string @@ -89,6 +88,9 @@ func ReconcileProjectSkills(projectRoot string, projectCfg *ProjectConfig, store } if existing != nil { + if store.MigrateLegacyKey(fullPath, existing) { + changed = true + } if existing.Source != source { existing.Source = source changed = true @@ -112,7 +114,7 @@ func ReconcileProjectSkills(projectRoot string, projectCfg *ProjectConfig, store Branch: branch, Group: group, } - store.Set(name, entry) + store.Set(fullPath, entry) changed = true } diff --git a/internal/config/project_reconcile_test.go b/internal/config/project_reconcile_test.go index 809c391f..3779fe6d 100644 --- a/internal/config/project_reconcile_test.go +++ b/internal/config/project_reconcile_test.go @@ -147,13 +147,18 @@ func TestReconcileProjectSkills_NestedSkillSetsGroup(t *testing.T) { t.Fatalf("ReconcileProjectSkills failed: %v", err) } - entry := store.Get("my-skill") + // After reconcile, nested skills use full-path keys (e.g. "tools/my-skill"). + entry := store.Get("tools/my-skill") if entry == nil { - t.Fatal("expected store to have 'my-skill'") + t.Fatal("expected store to have 'tools/my-skill'") } if entry.Group != "tools" { t.Errorf("expected group 'tools', got %q", entry.Group) } + // Legacy basename key should be removed after migration. + if store.Has("my-skill") { + t.Error("expected legacy basename key 'my-skill' to be removed") + } } func TestReconcileProjectSkills_PrunesStaleEntries(t *testing.T) { diff --git a/internal/config/reconcile.go b/internal/config/reconcile.go index 49ccd6c9..479402c3 100644 --- a/internal/config/reconcile.go +++ b/internal/config/reconcile.go @@ -48,19 +48,18 @@ func ReconcileGlobalSkills(cfg *Config, store *install.MetadataStore) error { fullPath := filepath.ToSlash(relPath) - // Extract basename (key) and group from the relative path. - // The store uses basename as key and Group for the parent path. - name := fullPath + // Extract group from the relative path (e.g. "frontend/foo" → "frontend"). group := "" if idx := strings.LastIndex(fullPath, "/"); idx >= 0 { group = fullPath[:idx] - name = fullPath[idx+1:] } var source string tracked := isGitRepo(path) - existing := store.Get(name) + // Look up using GetByPath which handles both full-path keys and + // legacy basename keys (for backward compatibility during migration). + existing := store.GetByPath(fullPath) if existing != nil && existing.Source != "" { source = existing.Source } else if tracked { @@ -70,7 +69,7 @@ func ReconcileGlobalSkills(cfg *Config, store *install.MetadataStore) error { return nil } - live[name] = true + live[fullPath] = true // Determine branch: from store entry or git (tracked repos) var branch string @@ -81,6 +80,9 @@ func ReconcileGlobalSkills(cfg *Config, store *install.MetadataStore) error { } if existing != nil { + if store.MigrateLegacyKey(fullPath, existing) { + changed = true + } if existing.Source != source { existing.Source = source changed = true @@ -104,7 +106,7 @@ func ReconcileGlobalSkills(cfg *Config, store *install.MetadataStore) error { Branch: branch, Group: group, } - store.Set(name, entry) + store.Set(fullPath, entry) changed = true } diff --git a/internal/config/reconcile_test.go b/internal/config/reconcile_test.go index 061db732..f4885ea2 100644 --- a/internal/config/reconcile_test.go +++ b/internal/config/reconcile_test.go @@ -170,13 +170,18 @@ func TestReconcileGlobalSkills_NestedSkillSetsGroup(t *testing.T) { t.Fatalf("ReconcileGlobalSkills failed: %v", err) } - entry := store.Get("pdf") + // After reconcile, nested skills use full-path keys (e.g. "frontend/pdf"). + entry := store.Get("frontend/pdf") if entry == nil { - t.Fatal("expected store to have 'pdf'") + t.Fatal("expected store to have 'frontend/pdf'") } if entry.Group != "frontend" { t.Errorf("expected group 'frontend', got %q", entry.Group) } + // Legacy basename key should be removed after migration. + if store.Has("pdf") { + t.Error("expected legacy basename key 'pdf' to be removed") + } } func TestReconcileGlobalSkills_PrunesStaleEntries(t *testing.T) { diff --git a/internal/install/install_queries.go b/internal/install/install_queries.go index 6fb377b6..102e3d86 100644 --- a/internal/install/install_queries.go +++ b/internal/install/install_queries.go @@ -25,12 +25,7 @@ func getUpdatableSkillsImpl(sourceDir string) ([]string, error) { if entry.Tracked { continue } - // Build the relative path: group/name or just name - relPath := name - if entry.Group != "" { - relPath = entry.Group + "/" + name - } - skills = append(skills, relPath) + skills = append(skills, KeyToRelPath(name, entry)) } return skills, nil } @@ -55,11 +50,7 @@ func FindRepoInstalls(sourceDir, cloneURL string) []string { continue } if repoURLsMatch(entry.RepoURL, cloneURL) { - relPath := name - if entry.Group != "" { - relPath = entry.Group + "/" + name - } - matches = append(matches, relPath) + matches = append(matches, KeyToRelPath(name, entry)) } } return matches diff --git a/internal/install/metadata.go b/internal/install/metadata.go index 7a586a8f..4b41eb8e 100644 --- a/internal/install/metadata.go +++ b/internal/install/metadata.go @@ -95,6 +95,27 @@ func (s *MetadataStore) GetByPath(relPath string) *MetadataEntry { return nil } +// KeyToRelPath returns the effective relative path for a store entry. +// For full-path keys it returns the key as-is; for legacy basename keys +// it prepends the entry's Group. +func KeyToRelPath(key string, entry *MetadataEntry) string { + if entry != nil && entry.Group != "" && !strings.HasPrefix(key, entry.Group+"/") { + return entry.Group + "/" + key + } + return key +} + +// MigrateLegacyKey promotes a legacy basename key to a full-path key. +// Returns true if migration occurred. No-op if the key is already full-path. +func (s *MetadataStore) MigrateLegacyKey(fullPath string, existing *MetadataEntry) bool { + if s.Has(fullPath) { + return false + } + s.Remove(filepath.Base(fullPath)) + s.Set(fullPath, existing) + return true +} + // List returns sorted entry names. func (s *MetadataStore) List() []string { names := make([]string, 0, len(s.Entries)) @@ -123,22 +144,32 @@ func (e *MetadataEntry) FullName() string { // RemoveByNames removes entries matching the given names, including group members. // Handles direct key matches, full-path matches (group/name), and group membership. +// Works with both legacy basename keys and full-path keys. func (s *MetadataStore) RemoveByNames(names map[string]bool) { - for _, name := range s.List() { - entry := s.Get(name) - fullName := name - if entry != nil && entry.Group != "" { - fullName = entry.Group + "/" + name - } - if names[name] || names[fullName] { - s.Remove(name) + for _, key := range s.List() { + entry := s.Get(key) + fullName := KeyToRelPath(key, entry) + if names[key] || names[fullName] { + s.Remove(key) continue } + // Also match by basename for backward compat (e.g. uninstall "foo" should + // remove full-path key "frontend/foo"). + if entry != nil && entry.Group != "" { + basename := key + if idx := strings.LastIndex(key, "/"); idx >= 0 { + basename = key[idx+1:] + } + if names[basename] { + s.Remove(key) + continue + } + } // Group directory uninstall: remove member skills if entry != nil && entry.Group != "" { for rn := range names { if entry.Group == rn || strings.HasPrefix(entry.Group, rn+"/") { - s.Remove(name) + s.Remove(key) break } } @@ -159,11 +190,10 @@ func WriteMetaToStore(sourceDir, destPath string, meta *SkillMeta) error { } rel = filepath.ToSlash(rel) - name := rel + // Extract group from relative path (e.g. "frontend/foo" → group "frontend"). group := "" if idx := strings.LastIndex(rel, "/"); idx >= 0 { group = rel[:idx] - name = rel[idx+1:] } store, loadErr := LoadMetadata(sourceDir) @@ -171,7 +201,17 @@ func WriteMetaToStore(sourceDir, destPath string, meta *SkillMeta) error { store = NewMetadataStore() } - store.Set(name, &MetadataEntry{ + // Use full relative path as key to avoid collisions between grouped + // skills with the same basename (e.g. "frontend/foo" vs "backend/foo"). + // Remove any legacy basename-only key for this group+basename pair. + if group != "" { + basename := rel[strings.LastIndex(rel, "/")+1:] + if old := store.Get(basename); old != nil && old.Group == group { + store.Remove(basename) + } + } + + store.Set(rel, &MetadataEntry{ Source: meta.Source, Kind: meta.Kind, Type: meta.Type, @@ -187,9 +227,9 @@ func WriteMetaToStore(sourceDir, destPath string, meta *SkillMeta) error { return store.Save(sourceDir) } -// LoadMetadata reads .metadata.json from the given directory. +// loadMetadataFile reads .metadata.json from the given directory (pure read, no migration). // Returns an empty store (version 1) if the file does not exist. -func LoadMetadata(dir string) (*MetadataStore, error) { +func loadMetadataFile(dir string) (*MetadataStore, error) { path := filepath.Join(dir, MetadataFileName) data, err := os.ReadFile(path) if err != nil { @@ -209,6 +249,21 @@ func LoadMetadata(dir string) (*MetadataStore, error) { return &store, nil } +// LoadMetadata reads .metadata.json and cleans up any lingering sidecar files. +// Sidecar migration is idempotent — if no sidecars exist, it's a fast no-op +// (one ReadDir per call). This ensures sidecars created after initial migration +// (e.g. by agent install) are always cleaned up regardless of which command runs. +func LoadMetadata(dir string) (*MetadataStore, error) { + store, err := loadMetadataFile(dir) + if err != nil { + return nil, err + } + if cleanupSidecars(store, dir) { + store.Save(dir) //nolint:errcheck + } + return store, nil +} + // Save writes .metadata.json atomically (temp file → rename). func (s *MetadataStore) Save(dir string) error { if err := os.MkdirAll(dir, 0755); err != nil { @@ -280,8 +335,8 @@ func (e *MetadataEntry) ComputeEntryHashes(skillPath string) error { // RefreshHashes recomputes file hashes for an entry that already has them. // No-op if entry doesn't exist or has no FileHashes. -func (s *MetadataStore) RefreshHashes(name, skillPath string) { - entry := s.Get(name) +func (s *MetadataStore) RefreshHashes(relPath, skillPath string) { + entry := s.GetByPath(relPath) if entry == nil || entry.FileHashes == nil { return } diff --git a/internal/install/metadata_migrate.go b/internal/install/metadata_migrate.go index 87d4a202..34c028ac 100644 --- a/internal/install/metadata_migrate.go +++ b/internal/install/metadata_migrate.go @@ -9,10 +9,32 @@ import ( "gopkg.in/yaml.v3" ) +// cleanupSidecars runs both skill and agent sidecar migration on dir. +// Returns true if any sidecars were found and cleaned up. +func cleanupSidecars(store *MetadataStore, dir string) bool { + before := storeFingerprint(store) + migrateSkillSidecars(store, dir) + migrateAgentSidecars(store, dir) + return storeFingerprint(store) != before +} + +// storeFingerprint returns a cheap fingerprint for change detection. +func storeFingerprint(s *MetadataStore) uint64 { + var h uint64 + for k, e := range s.Entries { + h += uint64(len(k)) + if e != nil && e.Source != "" { + h += uint64(len(e.Source)) + } + } + return h +} + // LoadMetadataWithMigration loads .metadata.json, or migrates from old format if needed. // kind is "" for skills directories, "agent" for agents directories. +// When .metadata.json already exists, LoadMetadata handles sidecar cleanup automatically. func LoadMetadataWithMigration(dir, kind string) (*MetadataStore, error) { - // Fast path: .metadata.json already exists + // Fast path: .metadata.json exists — LoadMetadata handles sidecar cleanup. metaPath := filepath.Join(dir, MetadataFileName) if _, err := os.Stat(metaPath); err == nil { return LoadMetadata(dir) @@ -21,7 +43,6 @@ func LoadMetadataWithMigration(dir, kind string) (*MetadataStore, error) { store := NewMetadataStore() // Phase 1: Migrate registry.yaml entries - // Look in dir itself and its parent (registry.yaml may live in .skillshare/ while dir is .skillshare/skills/) migrateRegistryEntries(store, dir, kind) if parent := filepath.Dir(dir); parent != dir { migrateRegistryEntries(store, parent, kind) @@ -167,10 +188,16 @@ func mergeSkillSidecar(store *MetadataStore, name, group, sidecarPath string) { return } - entry := store.Get(name) + // Use full-path key for grouped skills. + key := name + if group != "" { + key = group + "/" + name + } + + entry := store.Get(key) if entry == nil { entry = &MetadataEntry{} - store.Set(name, entry) + store.Set(key, entry) } // Merge sidecar fields — sidecar has richer data @@ -207,11 +234,25 @@ func mergeSkillSidecar(store *MetadataStore, name, group, sidecarPath string) { if group != "" && entry.Group == "" { entry.Group = group } + // Detect tracked repos: top-level parent starts with "_" + root := group + if idx := strings.Index(root, "/"); idx >= 0 { + root = root[:idx] + } + if len(root) > 0 && root[0] == '_' { + entry.Tracked = true + } } -// migrateAgentSidecars scans dir for *.skillshare-meta.json files, extracts agent name, -// reads as SkillMeta, merges into store with Kind="agent", removes old sidecar. +// migrateAgentSidecars recursively scans dir for *.skillshare-meta.json files, +// merges them into the centralized store with Kind="agent", and removes the sidecars. func migrateAgentSidecars(store *MetadataStore, dir string) { + walkAgentSidecars(store, dir, "") +} + +// walkAgentSidecars recursively walks dir for agent sidecar files. +// group is the parent prefix (empty for top-level, e.g. "demo" for agents/demo/). +func walkAgentSidecars(store *MetadataStore, dir, group string) { entries, err := os.ReadDir(dir) if err != nil { return @@ -220,6 +261,11 @@ func migrateAgentSidecars(store *MetadataStore, dir string) { const suffix = ".skillshare-meta.json" for _, de := range entries { if de.IsDir() { + subGroup := de.Name() + if group != "" { + subGroup = group + "/" + de.Name() + } + walkAgentSidecars(store, filepath.Join(dir, de.Name()), subGroup) continue } if !strings.HasSuffix(de.Name(), suffix) { @@ -230,54 +276,66 @@ func migrateAgentSidecars(store *MetadataStore, dir string) { if agentName == "" { continue } + // Use full-path key for grouped agents (e.g. "demo/reviewer") + key := agentName + if group != "" { + key = group + "/" + agentName + } sidecarPath := filepath.Join(dir, de.Name()) - data, err := os.ReadFile(sidecarPath) - if err != nil { - continue - } + mergeAgentSidecar(store, key, group, sidecarPath) + os.Remove(sidecarPath) + } +} - var meta SkillMeta - if err := json.Unmarshal(data, &meta); err != nil { - continue - } +// mergeAgentSidecar reads a SkillMeta sidecar and merges its fields into the store. +func mergeAgentSidecar(store *MetadataStore, key, group, sidecarPath string) { + data, err := os.ReadFile(sidecarPath) + if err != nil { + return + } - entry := store.Get(agentName) - if entry == nil { - entry = &MetadataEntry{} - store.Set(agentName, entry) - } + var meta SkillMeta + if err := json.Unmarshal(data, &meta); err != nil { + return + } - if meta.Source != "" && entry.Source == "" { - entry.Source = meta.Source - } - entry.Kind = "agent" - if meta.Type != "" { - entry.Type = meta.Type - } - if !meta.InstalledAt.IsZero() { - entry.InstalledAt = meta.InstalledAt - } - if meta.RepoURL != "" { - entry.RepoURL = meta.RepoURL - } - if meta.Subdir != "" { - entry.Subdir = meta.Subdir - } - if meta.Version != "" { - entry.Version = meta.Version - } - if meta.TreeHash != "" { - entry.TreeHash = meta.TreeHash - } - if meta.FileHashes != nil { - entry.FileHashes = meta.FileHashes - } - if meta.Branch != "" && entry.Branch == "" { - entry.Branch = meta.Branch - } + entry := store.Get(key) + if entry == nil { + entry = &MetadataEntry{} + store.Set(key, entry) + } - os.Remove(sidecarPath) + if meta.Source != "" && entry.Source == "" { + entry.Source = meta.Source + } + entry.Kind = "agent" + if meta.Type != "" { + entry.Type = meta.Type + } + if !meta.InstalledAt.IsZero() { + entry.InstalledAt = meta.InstalledAt + } + if meta.RepoURL != "" { + entry.RepoURL = meta.RepoURL + } + if meta.Subdir != "" { + entry.Subdir = meta.Subdir + } + if meta.Version != "" { + entry.Version = meta.Version + } + if meta.TreeHash != "" { + entry.TreeHash = meta.TreeHash + } + if meta.FileHashes != nil { + entry.FileHashes = meta.FileHashes + } + if meta.Branch != "" && entry.Branch == "" { + entry.Branch = meta.Branch + } + if group != "" && entry.Group == "" { + entry.Group = group } } From 4af128fcf055883c683defc8a8794dbaa0977aa0 Mon Sep 17 00:00:00 2001 From: Willie Date: Wed, 8 Apr 2026 16:35:27 +0800 Subject: [PATCH 089/205] fix: migrate metadata consumers from Get(basename) to GetByPath All call sites that looked up metadata entries using filepath.Base() now use store.GetByPath(relPath) which handles both full-path keys and legacy basename+group keys transparently. Also fixes makeInstallOpts() to set SourceDir from updateContext, preventing nested skill updates from writing metadata to wrong dir. --- cmd/skillshare/check.go | 2 +- cmd/skillshare/doctor.go | 3 +-- cmd/skillshare/install.go | 2 +- cmd/skillshare/list.go | 3 +-- cmd/skillshare/update_batch.go | 1 + cmd/skillshare/update_handlers.go | 12 ++++-------- internal/hub/index.go | 2 +- internal/server/handler_check.go | 2 +- internal/server/handler_check_stream.go | 2 +- internal/server/handler_skills.go | 4 ++-- internal/server/handler_skills_batch.go | 3 +-- internal/server/handler_update.go | 6 +++--- 12 files changed, 18 insertions(+), 24 deletions(-) diff --git a/cmd/skillshare/check.go b/cmd/skillshare/check.go index 70b210dd..69814b21 100644 --- a/cmd/skillshare/check.go +++ b/cmd/skillshare/check.go @@ -80,7 +80,7 @@ func collectCheckItems(sourceDir string, repos []string, skills []string) ( for _, skill := range skills { skillPath := filepath.Join(sourceDir, skill) - entry := store.Get(skill) + entry := store.GetByPath(skill) if entry == nil || entry.RepoURL == "" { result := checkSkillResult{Name: skill, Status: "local"} diff --git a/cmd/skillshare/doctor.go b/cmd/skillshare/doctor.go index 08357e4d..be845a59 100644 --- a/cmd/skillshare/doctor.go +++ b/cmd/skillshare/doctor.go @@ -692,8 +692,7 @@ func checkSkillIntegrity(result *doctorResult, discovered []sync.DiscoveredSkill } for _, skill := range discovered { - skillName := filepath.Base(skill.SourcePath) - entry := store.Get(skillName) + entry := store.GetByPath(skill.RelPath) if entry == nil { continue // Local skill without meta — expected, skip silently } diff --git a/cmd/skillshare/install.go b/cmd/skillshare/install.go index d194b64b..19b08b65 100644 --- a/cmd/skillshare/install.go +++ b/cmd/skillshare/install.go @@ -251,7 +251,7 @@ func resolveSkillFromName(skillName string, cfg *config.Config) (*install.Source return nil, fmt.Errorf("skill '%s' not found or has no metadata", skillName) } - entry := store.Get(skillName) + entry := store.GetByPath(skillName) if entry == nil || entry.Source == "" { return nil, fmt.Errorf("skill '%s' has no metadata, cannot update", skillName) } diff --git a/cmd/skillshare/list.go b/cmd/skillshare/list.go index 668ea960..bca00ab2 100644 --- a/cmd/skillshare/list.go +++ b/cmd/skillshare/list.go @@ -232,8 +232,7 @@ func buildSkillEntries(discovered []sync.DiscoveredSkill) []skillEntry { } // Enrich from centralized metadata store - skillName := filepath.Base(d.SourcePath) - if entry := store.Get(skillName); entry != nil { + if entry := store.GetByPath(d.RelPath); entry != nil { skills[i].Source = entry.Source skills[i].Type = entry.Type if !entry.InstalledAt.IsZero() { diff --git a/cmd/skillshare/update_batch.go b/cmd/skillshare/update_batch.go index d680e639..bc7dc6d0 100644 --- a/cmd/skillshare/update_batch.go +++ b/cmd/skillshare/update_batch.go @@ -40,6 +40,7 @@ func (uc *updateContext) makeInstallOpts() install.InstallOptions { Update: true, SkipAudit: uc.opts.skipAudit, AuditThreshold: uc.opts.threshold, + SourceDir: uc.sourcePath, } if uc.isProject() { opts.AuditProjectRoot = uc.projectRoot diff --git a/cmd/skillshare/update_handlers.go b/cmd/skillshare/update_handlers.go index 19dea97b..469d8d05 100644 --- a/cmd/skillshare/update_handlers.go +++ b/cmd/skillshare/update_handlers.go @@ -220,7 +220,7 @@ func updateRegularSkill(uc *updateContext, skillName string) (updateResult, erro if storeErr != nil { return updateResult{skipped: 1}, fmt.Errorf("cannot read metadata for '%s': %w", skillName, storeErr) } - meta := store.Get(skillName) + meta := store.GetByPath(skillName) if meta == nil || meta.Source == "" { return updateResult{skipped: 1}, fmt.Errorf("skill '%s' has no source metadata, cannot update", skillName) } @@ -363,13 +363,9 @@ func updateSkillFromMeta(uc *updateContext, skillPath string, cachedMeta *instal meta := cachedMeta if meta == nil { store, _ := install.LoadMetadataWithMigration(uc.sourcePath, "") - skillName := filepath.Base(skillPath) - // Try base name first, then relative path from source - meta = store.Get(skillName) - if meta == nil { - if rel, relErr := filepath.Rel(uc.sourcePath, skillPath); relErr == nil { - meta = store.Get(rel) - } + // GetByPath handles both full-path keys and legacy basename+group keys. + if rel, relErr := filepath.Rel(uc.sourcePath, skillPath); relErr == nil { + meta = store.GetByPath(filepath.ToSlash(rel)) } if meta == nil || meta.Source == "" { return false, nil, nil diff --git a/internal/hub/index.go b/internal/hub/index.go index ee903ff2..9e24655b 100644 --- a/internal/hub/index.go +++ b/internal/hub/index.go @@ -78,7 +78,7 @@ func BuildIndex(sourcePath string, full bool, auditSkills bool) (*Index, error) // Determine source: prefer entry.Source (remote origin), fallback to relPath. source := d.RelPath - if entry := store.Get(item.Name); entry != nil { + if entry := store.GetByPath(d.RelPath); entry != nil { if entry.Source != "" { source = entry.Source } diff --git a/internal/server/handler_check.go b/internal/server/handler_check.go index 6fc4fb96..63ff64a4 100644 --- a/internal/server/handler_check.go +++ b/internal/server/handler_check.go @@ -75,7 +75,7 @@ func (s *Server) handleCheck(w http.ResponseWriter, r *http.Request) { var localResults []skillCheckResult for _, skill := range skills { - entry := s.skillsStore.Get(skill) + entry := s.skillsStore.GetByPath(skill) if entry == nil || entry.RepoURL == "" { localResults = append(localResults, skillCheckResult{ Name: skill, diff --git a/internal/server/handler_check_stream.go b/internal/server/handler_check_stream.go index 8600034c..18ae79b3 100644 --- a/internal/server/handler_check_stream.go +++ b/internal/server/handler_check_stream.go @@ -49,7 +49,7 @@ func (s *Server) handleCheckStream(w http.ResponseWriter, r *http.Request) { var localResults []skillCheckResult for _, skill := range skills { - entry := s.skillsStore.Get(skill) + entry := s.skillsStore.GetByPath(skill) if entry == nil || entry.RepoURL == "" { localResults = append(localResults, skillCheckResult{ Name: skill, diff --git a/internal/server/handler_skills.go b/internal/server/handler_skills.go index e5309e62..2fd995ec 100644 --- a/internal/server/handler_skills.go +++ b/internal/server/handler_skills.go @@ -76,7 +76,7 @@ func (s *Server) handleListSkills(w http.ResponseWriter, r *http.Request) { Disabled: d.Disabled, } - if entry := s.skillsStore.Get(filepath.Base(d.SourcePath)); entry != nil { + if entry := s.skillsStore.GetByPath(d.RelPath); entry != nil { if !entry.InstalledAt.IsZero() { item.InstalledAt = entry.InstalledAt.Format(time.RFC3339) } @@ -157,7 +157,7 @@ func (s *Server) handleGetSkill(w http.ResponseWriter, r *http.Request) { Disabled: d.Disabled, } - if entry := s.skillsStore.Get(filepath.Base(d.SourcePath)); entry != nil { + if entry := s.skillsStore.GetByPath(d.RelPath); entry != nil { if !entry.InstalledAt.IsZero() { item.InstalledAt = entry.InstalledAt.Format("2006-01-02T15:04:05Z") } diff --git a/internal/server/handler_skills_batch.go b/internal/server/handler_skills_batch.go index a885720e..a204b441 100644 --- a/internal/server/handler_skills_batch.go +++ b/internal/server/handler_skills_batch.go @@ -191,8 +191,7 @@ func (s *Server) handleSetSkillTargets(w http.ResponseWriter, r *http.Request) { return } - skillName := filepath.Base(d.SourcePath) - s.skillsStore.RefreshHashes(skillName, d.SourcePath) + s.skillsStore.RefreshHashes(d.RelPath, d.SourcePath) s.skillsStore.Save(s.cfg.Source) //nolint:errcheck s.writeOpsLog("set-skill-targets", "ok", start, map[string]any{ diff --git a/internal/server/handler_update.go b/internal/server/handler_update.go index 466d2ea4..68ae4462 100644 --- a/internal/server/handler_update.go +++ b/internal/server/handler_update.go @@ -126,7 +126,7 @@ func (s *Server) handleUpdate(w http.ResponseWriter, r *http.Request) { func (s *Server) updateSingle(name string, force, skipAudit bool) updateResultItem { // Try exact skill path first (prevents basename collision with nested repos) skillPath := filepath.Join(s.cfg.Source, name) - if entry := s.skillsStore.Get(name); entry != nil && entry.Source != "" { + if entry := s.skillsStore.GetByPath(name); entry != nil && entry.Source != "" { return s.updateRegularSkill(name, skillPath, skipAudit) } @@ -262,7 +262,7 @@ func (s *Server) auditGateTrackedRepo(name, repoPath, beforeHash, threshold stri } func (s *Server) updateRegularSkill(name, skillPath string, skipAudit bool) updateResultItem { - entry := s.skillsStore.Get(name) + entry := s.skillsStore.GetByPath(name) if entry == nil { return updateResultItem{Name: name, Action: "error", Message: "no metadata found"} } @@ -358,7 +358,7 @@ func getServerUpdatableSkills(sourceDir string, store *install.MetadataStore) ([ if relP, relErr2 := filepath.Rel(walkRoot, path); relErr2 == nil { relName = filepath.ToSlash(relP) } - entry := store.Get(relName) + entry := store.GetByPath(relName) if entry == nil || entry.Source == "" { return nil // continue walking into subdirectories } From b5266574a76ecb0b040a30eb99e8fa43f2ca6f58 Mon Sep 17 00:00:00 2001 From: Willie Date: Wed, 8 Apr 2026 16:35:35 +0800 Subject: [PATCH 090/205] fix: rewrite agent update to use discovery + single-file installer reinstallAgent previously called install.Install() which expects directory-based skill installation. Agent sources point to .md files, so the subdir resolver rejected them. Now uses DiscoverFromGit/DiscoverFromGitSubdir + InstallAgentFromDiscovery which correctly handles single-file agent copies. Also preserves the original repo subdir scope for monorepo installs and reconstructs the correct destination subdirectory for grouped agents. --- cmd/skillshare/update_agents.go | 56 ++++++++++++++++++++++++++++----- 1 file changed, 48 insertions(+), 8 deletions(-) diff --git a/cmd/skillshare/update_agents.go b/cmd/skillshare/update_agents.go index 73fe06d9..d2a0eb51 100644 --- a/cmd/skillshare/update_agents.go +++ b/cmd/skillshare/update_agents.go @@ -150,7 +150,9 @@ func cmdUpdateAgents(args []string, cfg *config.Config, start time.Time) error { return nil } -// reinstallAgent re-installs an agent from its recorded source. +// reinstallAgent re-installs an agent from its recorded source using +// discovery + InstallAgentFromDiscovery (single-file copy), not the +// directory-based skill installer. func reinstallAgent(agentsDir string, r check.AgentCheckResult) error { metaFile := filepath.Join(agentsDir, r.Name+".skillshare-meta.json") @@ -168,7 +170,11 @@ func reinstallAgent(agentsDir string, r check.AgentCheckResult) error { return fmt.Errorf("no source in metadata") } - // Parse and re-install from source + // Reconstruct the repo-level subdir for discovery. + // ParseSource(meta.Source) gives the full path from repo root + // (e.g. "pkg/agents/reviewer.md"). meta.Subdir stores the agent's + // path within the subdir scope (e.g. "agents/reviewer.md"). + // The difference is the original repo subdir (e.g. "pkg"). source, parseErr := install.ParseSource(meta.Source) if parseErr != nil { return fmt.Errorf("invalid source: %w", parseErr) @@ -176,15 +182,49 @@ func reinstallAgent(agentsDir string, r check.AgentCheckResult) error { if meta.Branch != "" { source.Branch = meta.Branch } + repoSubdir := strings.TrimSuffix(source.Subdir, meta.Subdir) + repoSubdir = strings.TrimRight(repoSubdir, "/") + source.Subdir = repoSubdir - installOpts := install.InstallOptions{ - Kind: "agent", - AgentNames: []string{filepath.Base(r.Name)}, - Force: true, - Update: true, + // Discover agents — use subdir-scoped discovery for monorepo installs. + var discovery *install.DiscoveryResult + var discErr error + if source.HasSubdir() { + discovery, discErr = install.DiscoverFromGitSubdir(source) + } else { + discovery, discErr = install.DiscoverFromGit(source) + } + if discErr != nil { + return fmt.Errorf("discovery failed: %w", discErr) } + defer install.CleanupDiscovery(discovery) - _, installErr := install.Install(source, agentsDir, installOpts) + // Find the specific agent by name + agentName := filepath.Base(r.Name) + var targetAgent *install.AgentInfo + for i, a := range discovery.Agents { + if a.Name == agentName { + targetAgent = &discovery.Agents[i] + break + } + } + if targetAgent == nil { + return fmt.Errorf("agent %q not found in repository", agentName) + } + + // For grouped agents (r.Name contains "/", e.g. "tools/reviewer"), + // reconstruct the correct destination subdirectory so the file lands + // at agents/tools/reviewer.md rather than agents/reviewer.md. + destDir := agentsDir + if dir := filepath.Dir(r.Name); dir != "." { + destDir = filepath.Join(agentsDir, dir) + } + + installOpts := install.InstallOptions{ + Kind: "agent", + Force: true, + } + _, installErr := install.InstallAgentFromDiscovery(discovery, *targetAgent, destDir, installOpts) return installErr } From ff032b3745b11f7e87cfde2df7267b1f0e8f852a Mon Sep 17 00:00:00 2001 From: Willie Date: Wed, 8 Apr 2026 16:35:40 +0800 Subject: [PATCH 091/205] test: update integration tests for full-path metadata keys Grouped install tests now assert full-path keys (e.g. 'frontend/pdf-skill') instead of basename keys, and verify that legacy basename keys are removed after migration. --- tests/integration/install_group_test.go | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/tests/integration/install_group_test.go b/tests/integration/install_group_test.go index 9fd221af..b1f4177a 100644 --- a/tests/integration/install_group_test.go +++ b/tests/integration/install_group_test.go @@ -38,9 +38,10 @@ targets: {} if err != nil { t.Fatalf("failed to load metadata: %v", err) } - entry := store.Get("pdf-skill") + // Full-path key: "frontend/pdf-skill" (not just basename "pdf-skill") + entry := store.Get("frontend/pdf-skill") if entry == nil { - t.Fatal("expected metadata entry for pdf-skill") + t.Fatal("expected metadata entry for 'frontend/pdf-skill'") } if entry.Group != "frontend" { t.Errorf("metadata group = %q, want %q", entry.Group, "frontend") @@ -68,9 +69,10 @@ targets: {} if err != nil { t.Fatalf("failed to load metadata: %v", err) } - entry := store.Get("ui-skill") + // Full-path key: "frontend/vue/ui-skill" + entry := store.Get("frontend/vue/ui-skill") if entry == nil { - t.Fatal("expected metadata entry for ui-skill") + t.Fatal("expected metadata entry for 'frontend/vue/ui-skill'") } if entry.Group != "frontend/vue" { t.Errorf("metadata group = %q, want %q", entry.Group, "frontend/vue") @@ -104,9 +106,10 @@ targets: {} if err != nil { t.Fatalf("failed to load metadata: %v", err) } - entry := store.Get("source-pdf") + // Full-path key: "frontend/source-pdf" + entry := store.Get("frontend/source-pdf") if entry == nil { - t.Fatal("expected metadata entry for source-pdf after --into install") + t.Fatal("expected metadata entry for 'frontend/source-pdf' after --into install") } if entry.Group != "frontend" { t.Errorf("metadata group = %q, want %q", entry.Group, "frontend") @@ -159,9 +162,10 @@ func TestInstallProject_Into_RecordsGroupField(t *testing.T) { if err != nil { t.Fatalf("failed to load metadata: %v", err) } - entry := store.Get("my-skill") + // Full-path key: "tools/my-skill" + entry := store.Get("tools/my-skill") if entry == nil { - t.Fatal("expected metadata entry for my-skill") + t.Fatal("expected metadata entry for 'tools/my-skill'") } if entry.Group != "tools" { t.Errorf("metadata group = %q, want %q", entry.Group, "tools") From 0244abb4ceaae040ed38420560adcf1e56a8def9 Mon Sep 17 00:00:00 2001 From: Willie Date: Wed, 8 Apr 2026 17:43:09 +0800 Subject: [PATCH 092/205] refactor: consolidate reconcile walk logic into shared core Extract the identical walk-and-reconcile logic from ReconcileGlobalSkills and ReconcileProjectSkills into reconcileSkillsWalk() in a new reconcile_core.go file. Both callers are now thin wrappers. - Move git helpers (isGitRepo, gitCurrentBranch, gitRemoteOrigin) to core - Add pruneStaleEntries() shared helper - Project version injects gitignore collection via onFound callback - ~140 lines of duplication eliminated --- internal/config/project_reconcile.go | 157 ++------------------------ internal/config/reconcile.go | 118 +------------------- internal/config/reconcile_core.go | 161 +++++++++++++++++++++++++++ 3 files changed, 175 insertions(+), 261 deletions(-) create mode 100644 internal/config/reconcile_core.go diff --git a/internal/config/project_reconcile.go b/internal/config/project_reconcile.go index 18c1effd..14062b01 100644 --- a/internal/config/project_reconcile.go +++ b/internal/config/project_reconcile.go @@ -3,12 +3,10 @@ package config import ( "fmt" "os" - "os/exec" "path/filepath" "strings" "skillshare/internal/install" - "skillshare/internal/utils" ) // ReconcileProjectSkills scans the project source directory recursively for @@ -17,141 +15,30 @@ import ( // It also updates .skillshare/.gitignore for each tracked skill. func ReconcileProjectSkills(projectRoot string, projectCfg *ProjectConfig, store *install.MetadataStore, sourcePath string) error { if _, err := os.Stat(sourcePath); os.IsNotExist(err) { - return nil // no skills dir yet + return nil } - changed := false - - // Collect gitignore entries during walk, then batch-update once at the end. var gitignoreEntries []string - - walkRoot := utils.ResolveSymlink(sourcePath) - live := map[string]bool{} // tracks skills actually found on disk - err := filepath.WalkDir(walkRoot, func(path string, d os.DirEntry, walkErr error) error { - if walkErr != nil { - return nil - } - if path == walkRoot { - return nil - } - if !d.IsDir() { - return nil - } - // Skip hidden directories - if utils.IsHidden(d.Name()) { - return filepath.SkipDir - } - // Skip .git directories - if d.Name() == ".git" { - return filepath.SkipDir - } - - relPath, relErr := filepath.Rel(walkRoot, path) - if relErr != nil { - return nil - } - - fullPath := filepath.ToSlash(relPath) - - // Extract group from the relative path (e.g. "frontend/foo" → "frontend"). - group := "" - if idx := strings.LastIndex(fullPath, "/"); idx >= 0 { - group = fullPath[:idx] - } - - // Determine source and tracked status - var source string - tracked := isGitRepo(path) - - // Look up using GetByPath which handles both full-path keys and - // legacy basename keys (for backward compatibility during migration). - existing := store.GetByPath(fullPath) - if existing != nil && existing.Source != "" { - source = existing.Source - } else if tracked { - // Tracked repos have no store entry yet; derive source from git remote - source = gitRemoteOrigin(path) - } - if source == "" { - // Not an installed skill — continue walking deeper - return nil - } - - live[fullPath] = true - - // Determine branch: from store entry or git (tracked repos) - var branch string - if existing != nil && existing.Branch != "" { - branch = existing.Branch - } else if tracked { - branch = gitCurrentBranch(path) - } - - if existing != nil { - if store.MigrateLegacyKey(fullPath, existing) { - changed = true - } - if existing.Source != source { - existing.Source = source - changed = true - } - if existing.Tracked != tracked { - existing.Tracked = tracked - changed = true - } - if existing.Branch != branch { - existing.Branch = branch - changed = true - } - if existing.Group != group { - existing.Group = group - changed = true - } - } else { - entry := &install.MetadataEntry{ - Source: source, - Tracked: tracked, - Branch: branch, - Group: group, - } - store.Set(fullPath, entry) - changed = true - } - + onFound := func(fullPath string) { gitignoreEntries = append(gitignoreEntries, filepath.Join("skills", fullPath)) + } - // If it's a tracked repo (has .git), don't recurse into it - if tracked { - return filepath.SkipDir - } - - // If it has a source, it's a leaf skill — don't recurse - if existing != nil && existing.Source != "" { - return filepath.SkipDir - } - - return nil - }) + result, err := reconcileSkillsWalk(sourcePath, store, onFound) if err != nil { return fmt.Errorf("failed to scan project skills: %w", err) } - // Prune stale entries: skills in store but no longer on disk - for _, name := range store.List() { - if !live[name] { - store.Remove(name) - changed = true - } + if pruneStaleEntries(store, result.live) { + result.changed = true } - // Batch-update .gitignore once (reads/writes the file only once instead of per-skill). if len(gitignoreEntries) > 0 { if err := install.UpdateGitIgnoreBatch(filepath.Join(projectRoot, ".skillshare"), gitignoreEntries); err != nil { return fmt.Errorf("failed to update .skillshare/.gitignore: %w", err) } } - if changed { + if result.changed { if err := store.Save(sourcePath); err != nil { return err } @@ -184,13 +71,11 @@ func ReconcileProjectAgents(projectRoot string, store *install.MetadataStore, ag agentName := strings.TrimSuffix(name, ".md") - // Check store for this agent existing := store.Get(agentName) if existing == nil || existing.Source == "" { - continue // local agent, not installed + continue } - // Ensure kind is set if existing.Kind != "agent" { existing.Kind = "agent" changed = true @@ -213,29 +98,3 @@ func ReconcileProjectAgents(projectRoot string, store *install.MetadataStore, ag return nil } - -// isGitRepo checks if the given path is a git repository (has .git/ directory or file). -func isGitRepo(path string) bool { - _, err := os.Stat(filepath.Join(path, ".git")) - return err == nil -} - -// gitCurrentBranch returns the current branch name for a git repo, or "" on failure. -func gitCurrentBranch(repoPath string) string { - cmd := exec.Command("git", "-C", repoPath, "rev-parse", "--abbrev-ref", "HEAD") - out, err := cmd.Output() - if err != nil { - return "" - } - return strings.TrimSpace(string(out)) -} - -// gitRemoteOrigin returns the "origin" remote URL for a git repo, or "" on failure. -func gitRemoteOrigin(repoPath string) string { - cmd := exec.Command("git", "-C", repoPath, "remote", "get-url", "origin") - out, err := cmd.Output() - if err != nil { - return "" - } - return strings.TrimSpace(string(out)) -} diff --git a/internal/config/reconcile.go b/internal/config/reconcile.go index 479402c3..79fe09b0 100644 --- a/internal/config/reconcile.go +++ b/internal/config/reconcile.go @@ -3,135 +3,29 @@ package config import ( "fmt" "os" - "path/filepath" - "strings" "skillshare/internal/install" - "skillshare/internal/utils" ) // ReconcileGlobalSkills scans the global source directory for remotely-installed // skills (those with install metadata or tracked repos) and ensures they are -// present in the MetadataStore. This is the global-mode counterpart of -// ReconcileProjectSkills. +// present in the MetadataStore. func ReconcileGlobalSkills(cfg *Config, store *install.MetadataStore) error { sourcePath := cfg.Source if _, err := os.Stat(sourcePath); os.IsNotExist(err) { - return nil // no skills dir yet + return nil } - changed := false - - walkRoot := utils.ResolveSymlink(sourcePath) - live := map[string]bool{} // tracks skills actually found on disk - err := filepath.WalkDir(walkRoot, func(path string, d os.DirEntry, walkErr error) error { - if walkErr != nil { - return nil - } - if path == walkRoot { - return nil - } - if !d.IsDir() { - return nil - } - if utils.IsHidden(d.Name()) { - return filepath.SkipDir - } - if d.Name() == ".git" { - return filepath.SkipDir - } - - relPath, relErr := filepath.Rel(walkRoot, path) - if relErr != nil { - return nil - } - - fullPath := filepath.ToSlash(relPath) - - // Extract group from the relative path (e.g. "frontend/foo" → "frontend"). - group := "" - if idx := strings.LastIndex(fullPath, "/"); idx >= 0 { - group = fullPath[:idx] - } - - var source string - tracked := isGitRepo(path) - - // Look up using GetByPath which handles both full-path keys and - // legacy basename keys (for backward compatibility during migration). - existing := store.GetByPath(fullPath) - if existing != nil && existing.Source != "" { - source = existing.Source - } else if tracked { - source = gitRemoteOrigin(path) - } - if source == "" { - return nil - } - - live[fullPath] = true - - // Determine branch: from store entry or git (tracked repos) - var branch string - if existing != nil && existing.Branch != "" { - branch = existing.Branch - } else if tracked { - branch = gitCurrentBranch(path) - } - - if existing != nil { - if store.MigrateLegacyKey(fullPath, existing) { - changed = true - } - if existing.Source != source { - existing.Source = source - changed = true - } - if existing.Tracked != tracked { - existing.Tracked = tracked - changed = true - } - if existing.Branch != branch { - existing.Branch = branch - changed = true - } - if existing.Group != group { - existing.Group = group - changed = true - } - } else { - entry := &install.MetadataEntry{ - Source: source, - Tracked: tracked, - Branch: branch, - Group: group, - } - store.Set(fullPath, entry) - changed = true - } - - if tracked { - return filepath.SkipDir - } - if existing != nil && existing.Source != "" { - return filepath.SkipDir - } - - return nil - }) + result, err := reconcileSkillsWalk(sourcePath, store, nil) if err != nil { return fmt.Errorf("failed to scan global skills: %w", err) } - // Prune stale entries: skills in store but no longer on disk - for _, name := range store.List() { - if !live[name] { - store.Remove(name) - changed = true - } + if pruneStaleEntries(store, result.live) { + result.changed = true } - if changed { + if result.changed { if err := store.Save(sourcePath); err != nil { return err } diff --git a/internal/config/reconcile_core.go b/internal/config/reconcile_core.go new file mode 100644 index 00000000..195f8b51 --- /dev/null +++ b/internal/config/reconcile_core.go @@ -0,0 +1,161 @@ +package config + +import ( + "os" + "os/exec" + "path/filepath" + "strings" + + "skillshare/internal/install" + "skillshare/internal/utils" +) + +// reconcileResult holds the output of a reconcile walk. +type reconcileResult struct { + live map[string]bool + changed bool +} + +// reconcileSkillsWalk walks sourcePath for installed skills (those with metadata +// or tracked repos) and ensures they are present in the MetadataStore. +// onFound is called for each discovered installed skill; pass nil to skip. +func reconcileSkillsWalk(sourcePath string, store *install.MetadataStore, onFound func(fullPath string)) (reconcileResult, error) { + result := reconcileResult{live: map[string]bool{}} + + walkRoot := utils.ResolveSymlink(sourcePath) + err := filepath.WalkDir(walkRoot, func(path string, d os.DirEntry, walkErr error) error { + if walkErr != nil { + return nil + } + if path == walkRoot { + return nil + } + if !d.IsDir() { + return nil + } + if utils.IsHidden(d.Name()) { + return filepath.SkipDir + } + if d.Name() == ".git" { + return filepath.SkipDir + } + + relPath, relErr := filepath.Rel(walkRoot, path) + if relErr != nil { + return nil + } + + fullPath := filepath.ToSlash(relPath) + + group := "" + if idx := strings.LastIndex(fullPath, "/"); idx >= 0 { + group = fullPath[:idx] + } + + var source string + tracked := isGitRepo(path) + + existing := store.GetByPath(fullPath) + if existing != nil && existing.Source != "" { + source = existing.Source + } else if tracked { + source = gitRemoteOrigin(path) + } + if source == "" { + return nil + } + + result.live[fullPath] = true + + var branch string + if existing != nil && existing.Branch != "" { + branch = existing.Branch + } else if tracked { + branch = gitCurrentBranch(path) + } + + if existing != nil { + if store.MigrateLegacyKey(fullPath, existing) { + result.changed = true + } + if existing.Source != source { + existing.Source = source + result.changed = true + } + if existing.Tracked != tracked { + existing.Tracked = tracked + result.changed = true + } + if existing.Branch != branch { + existing.Branch = branch + result.changed = true + } + if existing.Group != group { + existing.Group = group + result.changed = true + } + } else { + entry := &install.MetadataEntry{ + Source: source, + Tracked: tracked, + Branch: branch, + Group: group, + } + store.Set(fullPath, entry) + result.changed = true + } + + if onFound != nil { + onFound(fullPath) + } + + if tracked { + return filepath.SkipDir + } + if existing != nil && existing.Source != "" { + return filepath.SkipDir + } + + return nil + }) + + return result, err +} + +// pruneStaleEntries removes store entries not present in the live set. +func pruneStaleEntries(store *install.MetadataStore, live map[string]bool) bool { + changed := false + for _, name := range store.List() { + if !live[name] { + store.Remove(name) + changed = true + } + } + return changed +} + +// isGitRepo checks if the given path is a git repository (has .git/ directory or file). +func isGitRepo(path string) bool { + _, err := os.Stat(filepath.Join(path, ".git")) + return err == nil +} + +// gitCurrentBranch returns the current branch name for a git repo, or "" on failure. +func gitCurrentBranch(repoPath string) string { + cmd := exec.Command("git", "-C", repoPath, "rev-parse", "--abbrev-ref", "HEAD") + out, err := cmd.Output() + if err != nil { + return "" + } + return strings.TrimSpace(string(out)) +} + +// gitRemoteOrigin returns the "origin" remote URL for a git repo, or "" on failure. +func gitRemoteOrigin(repoPath string) string { + cmd := exec.Command("git", "-C", repoPath, "remote", "get-url", "origin") + out, err := cmd.Output() + if err != nil { + return "" + } + return strings.TrimSpace(string(out)) +} From 67c7010b6d8257b6d8044a7e6aeff74444cfdf39 Mon Sep 17 00:00:00 2001 From: Willie Date: Wed, 8 Apr 2026 17:43:19 +0800 Subject: [PATCH 093/205] refactor: remove dead MetadataEntry.Name field and add LoadMetadataOrNew - Remove MetadataEntry.Name (json:"-", never set in production) and FullName() method (only used in one test) - Add LoadMetadataOrNew() helper to replace 8+ instances of the LoadMetadata + nil guard boilerplate --- internal/install/metadata.go | 18 +++++++++--------- internal/install/metadata_test.go | 28 +--------------------------- 2 files changed, 10 insertions(+), 36 deletions(-) diff --git a/internal/install/metadata.go b/internal/install/metadata.go index 4b41eb8e..6b79135d 100644 --- a/internal/install/metadata.go +++ b/internal/install/metadata.go @@ -35,7 +35,6 @@ type MetadataEntry struct { Group string `json:"group,omitempty"` Branch string `json:"branch,omitempty"` Into string `json:"into,omitempty"` - Name string `json:"-"` // runtime only, not persisted (map key is the name) // Meta fields InstalledAt time.Time `json:"installed_at,omitzero"` @@ -134,14 +133,6 @@ func (e *MetadataEntry) EffectiveKind() string { return e.Kind } -// FullName returns "group/name" if Group is set, otherwise Name. -func (e *MetadataEntry) FullName() string { - if e.Group != "" { - return e.Group + "/" + e.Name - } - return e.Name -} - // RemoveByNames removes entries matching the given names, including group members. // Handles direct key matches, full-path matches (group/name), and group membership. // Works with both legacy basename keys and full-path keys. @@ -264,6 +255,15 @@ func LoadMetadata(dir string) (*MetadataStore, error) { return store, nil } +// LoadMetadataOrNew loads metadata from dir, returning an empty store on error. +func LoadMetadataOrNew(dir string) *MetadataStore { + store, _ := LoadMetadata(dir) + if store == nil { + return NewMetadataStore() + } + return store +} + // Save writes .metadata.json atomically (temp file → rename). func (s *MetadataStore) Save(dir string) error { if err := os.MkdirAll(dir, 0755); err != nil { diff --git a/internal/install/metadata_test.go b/internal/install/metadata_test.go index 3b88a246..e60e31cb 100644 --- a/internal/install/metadata_test.go +++ b/internal/install/metadata_test.go @@ -167,32 +167,6 @@ func TestMetadataEntry_EffectiveKind(t *testing.T) { } } -func TestMetadataEntry_FullName(t *testing.T) { - tests := []struct { - name string - group string - entry string - want string - }{ - {"no group", "", "my-skill", "my-skill"}, - {"with group", "frontend", "my-skill", "frontend/my-skill"}, - {"nested group", "team/frontend", "my-skill", "team/frontend/my-skill"}, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - e := &MetadataEntry{ - Name: tt.entry, - Group: tt.group, - } - got := e.FullName() - if got != tt.want { - t.Errorf("FullName() = %q, want %q", got, tt.want) - } - }) - } -} - func TestNewMetadataStore_InitialState(t *testing.T) { s := NewMetadataStore() if s == nil { @@ -302,7 +276,7 @@ func TestMetadataStore_SaveCreatesDir(t *testing.T) { func TestMetadataPath(t *testing.T) { got := MetadataPath("/some/dir") - want := filepath.Join("/some/dir", ".metadata.json") + want := filepath.Join("/some/dir", MetadataFileName) if got != want { t.Errorf("MetadataPath = %q, want %q", got, want) } From 7552c76973301d66abb7ffcfbe4d1f3accd0db96 Mon Sep 17 00:00:00 2001 From: Willie Date: Wed, 8 Apr 2026 17:43:30 +0800 Subject: [PATCH 094/205] refactor: eliminate sidecar reads and use centralized metadata store Production code no longer reads or writes .skillshare-meta.json sidecars: - InstallAgentFromDiscovery: write to centralized store via WriteMetaToStore - agent_check.go: read from centralized store passed as parameter - handler_skills.go: read agents from s.agentsStore - audit.go: readMetaFileHashes reads from parent .metadata.json - analyzer_metadata.go: extract findMetaEntry to deduplicate walk logic - uninstall_agents.go: remove entry from centralized store on uninstall - batchUpdateAgents: pass store to reinstallAgent, remove dead field - Replace LoadMetadata+nil guard with LoadMetadataOrNew across 6 files --- cmd/skillshare/check.go | 5 +- cmd/skillshare/doctor.go | 8 +- cmd/skillshare/list.go | 31 ++-- cmd/skillshare/uninstall_agents.go | 19 ++- cmd/skillshare/update_agents.go | 185 ++++++++++++++++++------ internal/audit/analyzer_metadata.go | 59 ++++++-- internal/audit/audit.go | 29 ++-- internal/check/agent_check.go | 42 +++--- internal/hub/index.go | 5 +- internal/install/install_apply.go | 9 +- internal/server/handler_helpers_test.go | 10 +- internal/server/handler_skills.go | 39 +++-- 12 files changed, 277 insertions(+), 164 deletions(-) diff --git a/cmd/skillshare/check.go b/cmd/skillshare/check.go index 69814b21..d3792728 100644 --- a/cmd/skillshare/check.go +++ b/cmd/skillshare/check.go @@ -70,10 +70,7 @@ func collectCheckItems(sourceDir string, repos []string, skills []string) ( } // Load centralized metadata store once for all skills. - store, _ := install.LoadMetadata(sourceDir) - if store == nil { - store = install.NewMetadataStore() - } + store := install.LoadMetadataOrNew(sourceDir) urlGroups := make(map[string][]skillWithMeta) var localResults []checkSkillResult diff --git a/cmd/skillshare/doctor.go b/cmd/skillshare/doctor.go index be845a59..5b68d2ee 100644 --- a/cmd/skillshare/doctor.go +++ b/cmd/skillshare/doctor.go @@ -680,15 +680,11 @@ func checkSkillIntegrity(result *doctorResult, discovered []sync.DiscoveredSkill var toVerify []verifiable var skippedNames []string - // Load centralized metadata store once. - var store *install.MetadataStore + store := install.NewMetadataStore() if len(discovered) > 0 { sourceDir := strings.TrimSuffix(discovered[0].SourcePath, discovered[0].RelPath) sourceDir = strings.TrimRight(sourceDir, `/\`) - store, _ = install.LoadMetadata(sourceDir) - } - if store == nil { - store = install.NewMetadataStore() + store = install.LoadMetadataOrNew(sourceDir) } for _, skill := range discovered { diff --git a/cmd/skillshare/list.go b/cmd/skillshare/list.go index bca00ab2..043bee7c 100644 --- a/cmd/skillshare/list.go +++ b/cmd/skillshare/list.go @@ -204,15 +204,11 @@ func sortSkillEntries(skills []skillEntry, sortBy string) { func buildSkillEntries(discovered []sync.DiscoveredSkill) []skillEntry { skills := make([]skillEntry, len(discovered)) - // Load centralized metadata store once (derive source dir from first skill). - var store *install.MetadataStore + store := install.NewMetadataStore() if len(discovered) > 0 { sourceDir := strings.TrimSuffix(discovered[0].SourcePath, discovered[0].RelPath) sourceDir = strings.TrimRight(sourceDir, `/\`) - store, _ = install.LoadMetadata(sourceDir) - } - if store == nil { - store = install.NewMetadataStore() + store = install.LoadMetadataOrNew(sourceDir) } // Pre-fill non-I/O fields + metadata from store @@ -264,8 +260,8 @@ func buildSkillEntries(discovered []sync.DiscoveredSkill) []skillEntry { } // discoverAndBuildAgentEntries discovers agents from the given source directory -// and builds skillEntry items with Kind="agent". Reads sidecar metadata for -// installed agents (.skillshare-meta.json). +// and builds skillEntry items with Kind="agent", enriched from the centralized +// metadata store. func discoverAndBuildAgentEntries(agentsSource string) []skillEntry { if agentsSource == "" { return nil @@ -275,6 +271,8 @@ func discoverAndBuildAgentEntries(agentsSource string) []skillEntry { return nil } + store := install.LoadMetadataOrNew(agentsSource) + entries := make([]skillEntry, len(discovered)) for i, d := range discovered { entries[i] = skillEntry{ @@ -284,17 +282,12 @@ func discoverAndBuildAgentEntries(agentsSource string) []skillEntry { IsNested: d.IsNested, Disabled: d.Disabled, } - // Read sidecar metadata: .skillshare-meta.json (alongside the .md file) - baseName := strings.TrimSuffix(filepath.Base(d.RelPath), ".md") - metaPath := filepath.Join(filepath.Dir(d.SourcePath), baseName+".skillshare-meta.json") - if data, readErr := os.ReadFile(metaPath); readErr == nil { - var meta install.SkillMeta - if jsonErr := json.Unmarshal(data, &meta); jsonErr == nil { - entries[i].Source = meta.Source - entries[i].Type = meta.Type - if !meta.InstalledAt.IsZero() { - entries[i].InstalledAt = meta.InstalledAt.Format("2006-01-02") - } + key := strings.TrimSuffix(d.RelPath, ".md") + if entry := store.GetByPath(key); entry != nil { + entries[i].Source = entry.Source + entries[i].Type = entry.Type + if !entry.InstalledAt.IsZero() { + entries[i].InstalledAt = entry.InstalledAt.Format("2006-01-02") } } } diff --git a/cmd/skillshare/uninstall_agents.go b/cmd/skillshare/uninstall_agents.go index 5108b3bb..2d64528d 100644 --- a/cmd/skillshare/uninstall_agents.go +++ b/cmd/skillshare/uninstall_agents.go @@ -7,6 +7,7 @@ import ( "strings" "time" + "skillshare/internal/install" "skillshare/internal/oplog" "skillshare/internal/resource" "skillshare/internal/trash" @@ -103,13 +104,12 @@ func cmdUninstallAgents(agentsDir string, opts *uninstallOptions, cfgPath string } trashBase := trash.AgentTrashDir() + store, _ := install.LoadMetadata(agentsDir) var removed []string var failed []string for _, t := range targets { agentFile := filepath.Join(agentsDir, t.RelPath) - metaName := strings.TrimSuffix(filepath.Base(t.RelPath), ".md") - metaFile := filepath.Join(filepath.Dir(agentFile), metaName+".skillshare-meta.json") displayName := strings.TrimSuffix(t.RelPath, ".md") if opts.dryRun { @@ -118,17 +118,30 @@ func cmdUninstallAgents(agentsDir string, opts *uninstallOptions, cfgPath string continue } - _, err := trash.MoveAgentToTrash(agentFile, metaFile, t.Name, trashBase) + // Trash the agent file (+ legacy sidecar if it still exists) + metaName := strings.TrimSuffix(filepath.Base(t.RelPath), ".md") + legacySidecar := filepath.Join(filepath.Dir(agentFile), metaName+".skillshare-meta.json") + _, err := trash.MoveAgentToTrash(agentFile, legacySidecar, t.Name, trashBase) if err != nil { ui.Error("Failed to remove %s: %v", displayName, err) failed = append(failed, displayName) continue } + // Remove from centralized metadata store + if store != nil { + store.Remove(displayName) + } + ui.Success("Removed agent: %s", displayName) removed = append(removed, displayName) } + // Save store after all removals + if store != nil && len(removed) > 0 { + store.Save(agentsDir) //nolint:errcheck + } + // JSON output if opts.jsonOutput { output := struct { diff --git a/cmd/skillshare/update_agents.go b/cmd/skillshare/update_agents.go index d2a0eb51..a01b7777 100644 --- a/cmd/skillshare/update_agents.go +++ b/cmd/skillshare/update_agents.go @@ -1,7 +1,6 @@ package main import ( - "encoding/json" "fmt" "os" "path/filepath" @@ -109,28 +108,16 @@ func cmdUpdateAgents(args []string, cfg *config.Config, start time.Time) error { } } - // Update each agent by re-installing from its source + // Update agents, batching by repo URL to share git clones. var updated, failed int - for _, r := range updatable { - if opts.dryRun { + if opts.dryRun { + for _, r := range updatable { if !opts.jsonOutput { ui.Info(" %s: update available from %s", r.Name, r.Source) } - continue - } - - err := reinstallAgent(agentsDir, r) - if err != nil { - if !opts.jsonOutput { - ui.Error(" %s: update failed: %v", r.Name, err) - } - failed++ - } else { - if !opts.jsonOutput { - ui.Success(" %s: updated", r.Name) - } - updated++ } + } else { + updated, failed = batchUpdateAgents(agentsDir, updatable, !opts.jsonOutput) } if !opts.jsonOutput && !opts.dryRun { @@ -150,39 +137,149 @@ func cmdUpdateAgents(args []string, cfg *config.Config, start time.Time) error { return nil } -// reinstallAgent re-installs an agent from its recorded source using -// discovery + InstallAgentFromDiscovery (single-file copy), not the -// directory-based skill installer. -func reinstallAgent(agentsDir string, r check.AgentCheckResult) error { - metaFile := filepath.Join(agentsDir, r.Name+".skillshare-meta.json") +// agentRepoKey groups agents by clone URL + branch + repo subdir so agents +// from the same scope share a single git clone. +type agentRepoKey struct { + cloneURL string + branch string + repoSubdir string +} - // Read current metadata - metaData, err := os.ReadFile(metaFile) - if err != nil { - return fmt.Errorf("cannot read metadata: %w", err) +// batchUpdateAgents groups agents by repo URL and clones once per group. +// Agents with no RepoURL fall back to per-agent reinstallAgent. +func batchUpdateAgents(agentsDir string, agents []check.AgentCheckResult, verbose bool) (updated, failed int) { + store := install.LoadMetadataOrNew(agentsDir) + groups := map[agentRepoKey][]check.AgentCheckResult{} + var noRepo []check.AgentCheckResult + + for _, r := range agents { + if r.RepoURL == "" { + noRepo = append(noRepo, r) + continue + } + entry := store.GetByPath(r.Name) + if entry == nil || entry.Source == "" { + noRepo = append(noRepo, r) + continue + } + + source, parseErr := install.ParseSource(entry.Source) + if parseErr != nil { + noRepo = append(noRepo, r) + continue + } + repoSubdir := strings.TrimSuffix(source.Subdir, entry.Subdir) + repoSubdir = strings.TrimRight(repoSubdir, "/") + + key := agentRepoKey{ + cloneURL: r.RepoURL, + branch: entry.Branch, + repoSubdir: repoSubdir, + } + groups[key] = append(groups[key], r) } - var meta install.SkillMeta - if err := json.Unmarshal(metaData, &meta); err != nil { - return fmt.Errorf("invalid metadata: %w", err) + + // Batch: one clone per repo group + for key, members := range groups { + source := &install.Source{ + CloneURL: key.cloneURL, + Subdir: key.repoSubdir, + Branch: key.branch, + } + + var discovery *install.DiscoveryResult + var discErr error + if source.HasSubdir() { + discovery, discErr = install.DiscoverFromGitSubdir(source) + } else { + discovery, discErr = install.DiscoverFromGit(source) + } + if discErr != nil { + for _, m := range members { + if verbose { + ui.Error(" %s: discovery failed: %v", m.Name, discErr) + } + failed++ + } + continue + } + + // Build agent name → AgentInfo lookup + agentIndex := map[string]*install.AgentInfo{} + for i, a := range discovery.Agents { + agentIndex[a.Name] = &discovery.Agents[i] + } + + for _, m := range members { + agentName := filepath.Base(m.Name) + target := agentIndex[agentName] + if target == nil { + if verbose { + ui.Error(" %s: not found in repository", m.Name) + } + failed++ + continue + } + + destDir := agentsDir + if dir := filepath.Dir(m.Name); dir != "." { + destDir = filepath.Join(agentsDir, dir) + } + + opts := install.InstallOptions{Kind: "agent", Force: true, SourceDir: agentsDir} + if _, err := install.InstallAgentFromDiscovery(discovery, *target, destDir, opts); err != nil { + if verbose { + ui.Error(" %s: %v", m.Name, err) + } + failed++ + } else { + if verbose { + ui.Success(" %s: updated", m.Name) + } + updated++ + } + } + + install.CleanupDiscovery(discovery) + } + + // Fallback: agents without RepoURL + for _, r := range noRepo { + if err := reinstallAgent(agentsDir, r, store); err != nil { + if verbose { + ui.Error(" %s: %v", r.Name, err) + } + failed++ + } else { + if verbose { + ui.Success(" %s: updated", r.Name) + } + updated++ + } } - if meta.Source == "" { - return fmt.Errorf("no source in metadata") + return updated, failed +} + +// reinstallAgent re-installs an agent from its recorded source using +// discovery + InstallAgentFromDiscovery (single-file copy), not the +// directory-based skill installer. +// Used as fallback for agents without RepoURL in the batch path. +func reinstallAgent(agentsDir string, r check.AgentCheckResult, store *install.MetadataStore) error { + entry := store.GetByPath(r.Name) + if entry == nil || entry.Source == "" { + return fmt.Errorf("no source metadata for agent %q", r.Name) } // Reconstruct the repo-level subdir for discovery. - // ParseSource(meta.Source) gives the full path from repo root - // (e.g. "pkg/agents/reviewer.md"). meta.Subdir stores the agent's - // path within the subdir scope (e.g. "agents/reviewer.md"). - // The difference is the original repo subdir (e.g. "pkg"). - source, parseErr := install.ParseSource(meta.Source) + source, parseErr := install.ParseSource(entry.Source) if parseErr != nil { return fmt.Errorf("invalid source: %w", parseErr) } - if meta.Branch != "" { - source.Branch = meta.Branch + if entry.Branch != "" { + source.Branch = entry.Branch } - repoSubdir := strings.TrimSuffix(source.Subdir, meta.Subdir) + repoSubdir := strings.TrimSuffix(source.Subdir, entry.Subdir) repoSubdir = strings.TrimRight(repoSubdir, "/") source.Subdir = repoSubdir @@ -221,8 +318,9 @@ func reinstallAgent(agentsDir string, r check.AgentCheckResult) error { } installOpts := install.InstallOptions{ - Kind: "agent", - Force: true, + Kind: "agent", + Force: true, + SourceDir: agentsDir, } _, installErr := install.InstallAgentFromDiscovery(discovery, *targetAgent, destDir, installOpts) return installErr @@ -468,9 +566,10 @@ func cmdUpdateAgentsProject(args []string, projectRoot string, start time.Time) return nil } + store := install.LoadMetadataOrNew(agentsDir) var updated, failed int for _, r := range updatable { - if err := reinstallAgent(agentsDir, r); err != nil { + if err := reinstallAgent(agentsDir, r, store); err != nil { ui.Error(" %s: %v", r.Name, err) failed++ } else { diff --git a/internal/audit/analyzer_metadata.go b/internal/audit/analyzer_metadata.go index 39361d15..016e0f39 100644 --- a/internal/audit/analyzer_metadata.go +++ b/internal/audit/analyzer_metadata.go @@ -10,7 +10,7 @@ import ( ) // metadataAnalyzer cross-references SKILL.md metadata (name, description) -// against the actual git source URL from .skillshare-meta.json. +// against the actual git source URL from the centralized metadata store. // Detects social-engineering patterns: publisher mismatch and authority claims. // Runs at skill scope after all files are walked. type metadataAnalyzer struct{} @@ -20,9 +20,19 @@ func (a *metadataAnalyzer) Scope() AnalyzerScope { return ScopeSkill } // metaJSON is a minimal subset of install.SkillMeta to avoid import cycles. type metaJSON struct { - RepoURL string `json:"repo_url"` + RepoURL string `json:"repo_url"` + FileHashes map[string]string `json:"file_hashes"` } +// metadataStoreJSON is a minimal subset of install.MetadataStore for reading +// the centralized .metadata.json without importing the install package. +type metadataStoreJSON struct { + Entries map[string]metaJSON `json:"entries"` +} + +// metadataFileName mirrors install.MetadataFileName to avoid a circular import. +const metadataFileName = ".metadata.json" + // Rule IDs for disable support via audit-rules.yaml. const ( rulePublisherMismatch = "publisher-mismatch" @@ -47,7 +57,6 @@ func (a *metadataAnalyzer) Analyze(ctx *AnalyzeContext) ([]Finding, error) { return nil, nil } - // Read .skillshare-meta.json for source URL. repoURL := readMetaRepoURL(ctx.SkillPath) // Read SKILL.md frontmatter for name and description. @@ -73,17 +82,45 @@ func (a *metadataAnalyzer) Analyze(ctx *AnalyzeContext) ([]Finding, error) { return findings, nil } -// readMetaRepoURL reads repo_url from .skillshare-meta.json in skillPath. +// findMetaEntry walks up parent directories of skillPath looking for the +// centralized .metadata.json store and returns the raw entry for this skill. +func findMetaEntry(skillPath string) *metaJSON { + skillName := filepath.Base(skillPath) + dir := filepath.Dir(skillPath) + + for i := 0; i < 3 && dir != filepath.Dir(dir); i++ { + data, err := os.ReadFile(filepath.Join(dir, metadataFileName)) + if err == nil { + var store metadataStoreJSON + if json.Unmarshal(data, &store) == nil { + if rel, relErr := filepath.Rel(dir, skillPath); relErr == nil { + key := filepath.ToSlash(rel) + if e, ok := store.Entries[key]; ok { + return &e + } + } + if e, ok := store.Entries[skillName]; ok { + return &e + } + } + } + dir = filepath.Dir(dir) + } + return nil +} + func readMetaRepoURL(skillPath string) string { - data, err := os.ReadFile(filepath.Join(skillPath, ".skillshare-meta.json")) - if err != nil { - return "" + if e := findMetaEntry(skillPath); e != nil { + return e.RepoURL } - var m metaJSON - if json.Unmarshal(data, &m) != nil { - return "" + return "" +} + +func readMetaFileHashes(skillPath string) map[string]string { + if e := findMetaEntry(skillPath); e != nil { + return e.FileHashes } - return m.RepoURL + return nil } // readSkillFrontmatter extracts name and description from SKILL.md. diff --git a/internal/audit/audit.go b/internal/audit/audit.go index 76721966..ba1bc638 100644 --- a/internal/audit/audit.go +++ b/internal/audit/audit.go @@ -3,7 +3,6 @@ package audit import ( "crypto/sha256" "encoding/hex" - "encoding/json" "errors" "fmt" "os" @@ -1633,30 +1632,22 @@ func isExternalOrAnchor(target string) bool { return strings.HasPrefix(target, "#") } -// checkContentIntegrity compares files on disk against pinned hashes in -// .skillshare-meta.json. Backward-compatible: skips silently when meta or -// file_hashes is absent. cache holds file contents already read during the -// walk phase; files not in cache are read from disk as fallback. +// checkContentIntegrity compares files on disk against pinned hashes in the +// centralized .metadata.json store. Backward-compatible: skips silently when +// metadata or file_hashes is absent. cache holds file contents already read +// during the walk phase; files not in cache are read from disk as fallback. // allFiles (if non-nil) is the set of file relPaths collected during the main // walk, used to detect unexpected files without a second filepath.Walk. func checkContentIntegrity(skillPath string, cache map[string][]byte, allFiles map[string]bool) []Finding { - metaPath := filepath.Join(skillPath, ".skillshare-meta.json") - data, err := os.ReadFile(metaPath) - if err != nil { - return nil // no meta → skip - } - - var raw struct { - FileHashes map[string]string `json:"file_hashes"` - } - if err := json.Unmarshal(data, &raw); err != nil || len(raw.FileHashes) == 0 { - return nil // no hashes → skip + fileHashes := readMetaFileHashes(skillPath) + if len(fileHashes) == 0 { + return nil } var findings []Finding // Check pinned files: missing or tampered - for rel, expected := range raw.FileHashes { + for rel, expected := range fileHashes { normalizedRel := filepath.FromSlash(rel) // Reject absolute keys in metadata (e.g. "/etc/passwd"). // file_hashes must always be skill-relative paths. @@ -1733,7 +1724,7 @@ func checkContentIntegrity(skillPath string, cache map[string][]byte, allFiles m if allFiles != nil { // Use pre-collected file set from the main walk (no second walk needed). for relPath := range allFiles { - if _, ok := raw.FileHashes[relPath]; !ok { + if _, ok := fileHashes[relPath]; !ok { findings = append(findings, Finding{ Severity: SeverityLow, Pattern: "content-unexpected", @@ -1767,7 +1758,7 @@ func checkContentIntegrity(skillPath string, cache map[string][]byte, allFiles m return nil } normalized := filepath.ToSlash(rel) - if _, ok := raw.FileHashes[normalized]; !ok { + if _, ok := fileHashes[normalized]; !ok { findings = append(findings, Finding{ Severity: SeverityLow, Pattern: "content-unexpected", diff --git a/internal/check/agent_check.go b/internal/check/agent_check.go index be54662e..4bdccbe2 100644 --- a/internal/check/agent_check.go +++ b/internal/check/agent_check.go @@ -1,8 +1,6 @@ package check import ( - "encoding/json" - "os" "path/filepath" "skillshare/internal/install" @@ -29,44 +27,38 @@ func CheckAgents(agentsDir string) []AgentCheckResult { return nil } + // Load centralized metadata store (auto-migrates any lingering sidecars). + store := install.LoadMetadataOrNew(agentsDir) + var results []AgentCheckResult for _, d := range discovered { - result := checkOneAgent(d.SourcePath, d.RelPath) + result := checkOneAgent(store, d.SourcePath, d.RelPath) results = append(results, result) } return results } -// checkOneAgent checks a single agent file. sourcePath is the absolute path -// to the .md file; relPath is relative to the agents root (e.g. "demo/code-reviewer.md"). -func checkOneAgent(sourcePath, relPath string) AgentCheckResult { +// checkOneAgent checks a single agent file against the centralized metadata store. +// sourcePath is the absolute path to the .md file; relPath is relative to the +// agents root (e.g. "demo/code-reviewer.md"). +func checkOneAgent(store *install.MetadataStore, sourcePath, relPath string) AgentCheckResult { fileName := filepath.Base(relPath) - agentName := fileName[:len(fileName)-len(".md")] - result := AgentCheckResult{Name: relPath[:len(relPath)-len(".md")]} + key := relPath[:len(relPath)-len(".md")] + result := AgentCheckResult{Name: key} - // Look for sidecar metadata: .skillshare-meta.json alongside the .md file - dir := filepath.Dir(sourcePath) - metaPath := filepath.Join(dir, agentName+".skillshare-meta.json") - metaData, err := os.ReadFile(metaPath) - if err != nil { + entry := store.GetByPath(key) + if entry == nil || entry.Source == "" { result.Status = "local" return result } - var meta install.SkillMeta - if err := json.Unmarshal(metaData, &meta); err != nil { - result.Status = "error" - result.Message = "invalid metadata" - return result - } - - result.Source = meta.Source - result.Version = meta.Version - result.RepoURL = meta.RepoURL + result.Source = entry.Source + result.Version = entry.Version + result.RepoURL = entry.RepoURL // Compare file hash - if meta.FileHashes == nil || meta.FileHashes[fileName] == "" { + if entry.FileHashes == nil || entry.FileHashes[fileName] == "" { result.Status = "local" return result } @@ -78,7 +70,7 @@ func checkOneAgent(sourcePath, relPath string) AgentCheckResult { return result } - if currentHash == meta.FileHashes[fileName] { + if currentHash == entry.FileHashes[fileName] { result.Status = "up_to_date" } else { result.Status = "drifted" diff --git a/internal/hub/index.go b/internal/hub/index.go index 9e24655b..2bc3657d 100644 --- a/internal/hub/index.go +++ b/internal/hub/index.go @@ -65,10 +65,7 @@ func BuildIndex(sourcePath string, full bool, auditSkills bool) (*Index, error) } // Load centralized metadata store once for all skills. - store, _ := install.LoadMetadata(sourcePath) - if store == nil { - store = install.NewMetadataStore() - } + store := install.LoadMetadataOrNew(sourcePath) entries := make([]SkillEntry, len(discovered)) for i, d := range discovered { diff --git a/internal/install/install_apply.go b/internal/install/install_apply.go index 5f14031a..a891005b 100644 --- a/internal/install/install_apply.go +++ b/internal/install/install_apply.go @@ -1,7 +1,6 @@ package install import ( - "encoding/json" "fmt" "os" "path/filepath" @@ -443,7 +442,7 @@ func InstallAgentFromDiscovery(discovery *DiscoveryResult, agent AgentInfo, dest return nil, fmt.Errorf("failed to write agent %s: %w", agent.FileName, err) } - // Write metadata alongside the agent file (as .skillshare-meta.json) + // Write metadata to centralized .metadata.json store. source := &Source{ Type: discovery.Source.Type, Raw: result.Source, @@ -456,14 +455,12 @@ func InstallAgentFromDiscovery(discovery *DiscoveryResult, agent AgentInfo, dest if discovery.CommitHash != "" { meta.Version = discovery.CommitHash } - // For agents, file_hashes is just the single file if hash, hashErr := computeSingleFileHash(destFile); hashErr == nil { meta.FileHashes = map[string]string{agent.FileName: hash} } - metaPath := filepath.Join(destDir, agent.Name+".skillshare-meta.json") - if metaData, marshalErr := json.MarshalIndent(meta, "", " "); marshalErr == nil { - os.WriteFile(metaPath, metaData, 0644) + if err := WriteMetaToStore(opts.SourceDir, destFile, meta); err != nil { + result.Warnings = append(result.Warnings, fmt.Sprintf("failed to write metadata: %v", err)) } result.Action = "installed" diff --git a/internal/server/handler_helpers_test.go b/internal/server/handler_helpers_test.go index 48eef9de..a7c04a89 100644 --- a/internal/server/handler_helpers_test.go +++ b/internal/server/handler_helpers_test.go @@ -6,6 +6,7 @@ import ( "testing" "skillshare/internal/config" + "skillshare/internal/install" ) // newTestServer creates an isolated Server for handler testing. @@ -85,9 +86,12 @@ func addTrackedRepo(t *testing.T, sourceDir, relPath string) { } } -// addSkillMeta creates a .skillshare-meta.json for a skill (marks it as remotely installed). +// addSkillMeta writes a metadata entry into the centralized .metadata.json store. func addSkillMeta(t *testing.T, sourceDir, name, source string) { t.Helper() - meta := `{"source":"` + source + `"}` - os.WriteFile(filepath.Join(sourceDir, name, ".skillshare-meta.json"), []byte(meta), 0644) + store := install.LoadMetadataOrNew(sourceDir) + store.Set(name, &install.MetadataEntry{Source: source}) + if err := store.Save(sourceDir); err != nil { + t.Fatalf("addSkillMeta: %v", err) + } } diff --git a/internal/server/handler_skills.go b/internal/server/handler_skills.go index 2fd995ec..350a342c 100644 --- a/internal/server/handler_skills.go +++ b/internal/server/handler_skills.go @@ -1,7 +1,6 @@ package server import ( - "encoding/json" "fmt" "log" "net/http" @@ -104,17 +103,16 @@ func (s *Server) handleListSkills(w http.ResponseWriter, r *http.Request) { SourcePath: d.SourcePath, } - // Read sidecar metadata: .skillshare-meta.json - metaPath := filepath.Join(filepath.Dir(d.SourcePath), strings.TrimSuffix(filepath.Base(d.RelPath), ".md")+".skillshare-meta.json") - if metaData, readErr := os.ReadFile(metaPath); readErr == nil { - var meta install.SkillMeta - if json.Unmarshal(metaData, &meta) == nil { - item.InstalledAt = meta.InstalledAt.Format(time.RFC3339) - item.Source = meta.Source - item.Type = meta.Type - item.RepoURL = meta.RepoURL - item.Version = meta.Version + // Read from centralized agents metadata store + agentKey := strings.TrimSuffix(d.RelPath, ".md") + if entry := s.agentsStore.GetByPath(agentKey); entry != nil { + if !entry.InstalledAt.IsZero() { + item.InstalledAt = entry.InstalledAt.Format(time.RFC3339) } + item.Source = entry.Source + item.Type = entry.Type + item.RepoURL = entry.RepoURL + item.Version = entry.Version } items = append(items, item) @@ -159,7 +157,7 @@ func (s *Server) handleGetSkill(w http.ResponseWriter, r *http.Request) { if entry := s.skillsStore.GetByPath(d.RelPath); entry != nil { if !entry.InstalledAt.IsZero() { - item.InstalledAt = entry.InstalledAt.Format("2006-01-02T15:04:05Z") + item.InstalledAt = entry.InstalledAt.Format(time.RFC3339) } item.Source = entry.Source item.Type = entry.Type @@ -223,16 +221,15 @@ func (s *Server) handleGetSkill(w http.ResponseWriter, r *http.Request) { SourcePath: d.SourcePath, } - metaPath := filepath.Join(filepath.Dir(d.SourcePath), strings.TrimSuffix(filepath.Base(d.RelPath), ".md")+".skillshare-meta.json") - if metaData, metaReadErr := os.ReadFile(metaPath); metaReadErr == nil { - var meta install.SkillMeta - if json.Unmarshal(metaData, &meta) == nil { - item.InstalledAt = meta.InstalledAt.Format(time.RFC3339) - item.Source = meta.Source - item.Type = meta.Type - item.RepoURL = meta.RepoURL - item.Version = meta.Version + agentKey := strings.TrimSuffix(d.RelPath, ".md") + if entry := s.agentsStore.GetByPath(agentKey); entry != nil { + if !entry.InstalledAt.IsZero() { + item.InstalledAt = entry.InstalledAt.Format(time.RFC3339) } + item.Source = entry.Source + item.Type = entry.Type + item.RepoURL = entry.RepoURL + item.Version = entry.Version } writeJSON(w, map[string]any{ From 19d88ce905d89c899ef95bb6daa89bf321549864 Mon Sep 17 00:00:00 2001 From: Willie Date: Wed, 8 Apr 2026 17:43:39 +0800 Subject: [PATCH 095/205] chore: update schema and red team scripts for centralized metadata - registry.schema.json: describe .metadata.json store format (version, entries map with full field set) instead of old registry.yaml format - Red team scripts: add write_store_meta helper, replace sidecar writes with centralized store writes in phase3 (integrity) and phase5 (advanced) --- schemas/registry.schema.json | 81 ++++++++++++++++++++-------- scripts/red_team/_helpers.sh | 22 ++++++++ scripts/red_team/phase3_integrity.sh | 37 +++---------- scripts/red_team/phase5_advanced.sh | 27 ++-------- 4 files changed, 90 insertions(+), 77 deletions(-) diff --git a/schemas/registry.schema.json b/schemas/registry.schema.json index 9d79d96f..00aaf7ec 100644 --- a/schemas/registry.schema.json +++ b/schemas/registry.schema.json @@ -1,41 +1,43 @@ { "$schema": "https://json-schema.org/draft/2020-12/schema", "$id": "https://raw.githubusercontent.com/runkids/skillshare/main/schemas/registry.schema.json", - "title": "Skillshare Registry", - "description": "Skill registry for skillshare — tracks installed/tracked skills. Location: ~/.config/skillshare/skills/registry.yaml (global) or .skillshare/registry.yaml (project). Auto-managed by reconciliation; manual edits are preserved.", + "title": "Skillshare Metadata Store", + "description": "Centralized metadata store for skillshare — tracks installed skills and agents. Location: ~/.config/skillshare/skills/.metadata.json (global) or .skillshare/skills/.metadata.json (project). Auto-managed; manual edits are preserved on reconciliation.", "type": "object", "additionalProperties": false, "properties": { - "skills": { - "type": "array", - "description": "Installed/tracked skills.", - "items": { - "$ref": "#/$defs/skillEntry" + "version": { + "type": "integer", + "description": "Schema version. Currently 1.", + "const": 1 + }, + "entries": { + "type": "object", + "description": "Map of skill/agent relative paths to their metadata entries.", + "additionalProperties": { + "$ref": "#/$defs/metadataEntry" } } }, + "required": ["version", "entries"], "$defs": { - "skillEntry": { + "metadataEntry": { "type": "object", - "description": "A managed resource entry tracked in the registry.", - "required": ["name", "source"], - "additionalProperties": false, + "description": "Metadata for an installed skill or agent.", "properties": { - "name": { + "source": { "type": "string", - "description": "Resource name (skill directory name or agent file name).", - "examples": ["pdf", "_team-skills", "tutor"] + "description": "Original source input (GitHub path, local path, etc.).", + "examples": ["github.com/anthropics/skills/skills/pdf", "github.com/team/agents"] }, "kind": { "type": "string", - "description": "Resource kind. Omitted or empty defaults to 'skill'. ('agent' is reserved for a future release.)", - "enum": ["skill", "agent"], - "default": "skill" + "description": "Resource kind. Empty or omitted defaults to 'skill'.", + "enum": ["", "skill", "agent"] }, - "source": { + "type": { "type": "string", - "description": "GitHub path or local path where the resource was installed from.", - "examples": ["anthropics/skills/skills/pdf", "github.com/team/skills"] + "description": "Source type (github, local, git-https, git-ssh, etc.)." }, "tracked": { "type": "boolean", @@ -45,12 +47,45 @@ "group": { "type": "string", "description": "Subdirectory group the resource belongs to (set by --into).", - "examples": ["frontend", "backend"] + "examples": ["frontend", "backend", "team/frontend"] }, "branch": { "type": "string", - "description": "Git branch to clone from. Omit for remote default branch.", - "examples": ["develop", "frontend"] + "description": "Git branch to clone from. Omit for remote default branch." + }, + "into": { + "type": "string", + "description": "Original --into value used during install." + }, + "installed_at": { + "type": "string", + "format": "date-time", + "description": "ISO 8601 timestamp of when the resource was installed." + }, + "repo_url": { + "type": "string", + "description": "Git clone URL for the source repository.", + "examples": ["https://github.com/anthropics/skills.git"] + }, + "subdir": { + "type": "string", + "description": "Subdirectory within the repo (for monorepo installs)." + }, + "version": { + "type": "string", + "description": "Git commit hash at install/update time." + }, + "tree_hash": { + "type": "string", + "description": "Git tree SHA of the subdir at install time." + }, + "file_hashes": { + "type": "object", + "description": "Map of relative file paths to sha256: digests for integrity verification.", + "additionalProperties": { + "type": "string", + "pattern": "^sha256:[0-9a-f]{64}$" + } } } } diff --git a/scripts/red_team/_helpers.sh b/scripts/red_team/_helpers.sh index 9582f57f..f5aed0e3 100644 --- a/scripts/red_team/_helpers.sh +++ b/scripts/red_team/_helpers.sh @@ -49,6 +49,28 @@ $content SKILL_EOF } +# Write or merge a skill entry into the centralized .metadata.json store. +# Usage: write_store_meta +# Example: write_store_meta "$SOURCE_DIR" "my-skill" '{"source":"test","file_hashes":{"SKILL.md":"sha256:abc"}}' +write_store_meta() { + local source_dir="$1" + local skill_name="$2" + local entry_json="$3" + local store_path="$source_dir/.metadata.json" + + if [ -f "$store_path" ]; then + # Merge into existing store + local tmp + tmp=$(jq --arg name "$skill_name" --argjson entry "$entry_json" \ + '.entries[$name] = $entry' "$store_path") + echo "$tmp" > "$store_path" + else + # Create new store + jq -n --arg name "$skill_name" --argjson entry "$entry_json" \ + '{version:1, entries:{($name): $entry}}' > "$store_path" + fi +} + # Run skillshare with isolated config. # -g is placed after the subcommand to force global mode, # preventing auto-detection of .skillshare/ in the working directory. diff --git a/scripts/red_team/phase3_integrity.sh b/scripts/red_team/phase3_integrity.sh index 310e1a7e..b80e9583 100644 --- a/scripts/red_team/phase3_integrity.sh +++ b/scripts/red_team/phase3_integrity.sh @@ -13,16 +13,8 @@ Safe content for hash verification." SKILL_HASH=$(shasum -a 256 "$INTEGRITY_DIR/SKILL.md" | awk '{print $1}') -cat > "$INTEGRITY_DIR/.skillshare-meta.json" < "$INTEGRITY_DIR/.skillshare-meta.json" < "$INTEGRITY_DIR/.skillshare-meta.json" < "$INTEGRITY_DIR/sneaky.sh" diff --git a/scripts/red_team/phase5_advanced.sh b/scripts/red_team/phase5_advanced.sh index 84c1527b..2d77a92e 100644 --- a/scripts/red_team/phase5_advanced.sh +++ b/scripts/red_team/phase5_advanced.sh @@ -13,19 +13,8 @@ Safe content for traversal hardening checks." TRAVERSAL_HASH=$(shasum -a 256 "$TRAVERSAL_DIR/SKILL.md" | awk '{print $1}') echo "TOP SECRET" > "$TMPDIR_ROOT/secret.txt" -cat > "$TRAVERSAL_DIR/.skillshare-meta.json" < "$SYMLINK_DIR/.skillshare-meta.json" < "$TMPDIR_ROOT/outside.txt" ln -s "$TMPDIR_ROOT/outside.txt" "$SYMLINK_DIR/external-link.txt" From f95a113fce96a755adb0c8eac04c04548717334d Mon Sep 17 00:00:00 2001 From: Willie Date: Wed, 8 Apr 2026 17:43:46 +0800 Subject: [PATCH 096/205] test: migrate all test metadata to centralized store format Replace .skillshare-meta.json sidecar writes with centralized .metadata.json store writes across all test files. Use install.MetadataFileName constant instead of hardcoded strings. Updated: audit_test, doctor_test, gitignore_project_test, install_online_test, list_project_test, sync_symlinked_dir_test, uninstall_project_test, uninstall_test, uninstall_trash_test, audit_output_online_test, audit_scan_skill_test, analyzer_metadata_test --- internal/audit/analyzer_metadata_test.go | 30 +++++++++----- internal/audit/audit_scan_skill_test.go | 24 +++++++---- tests/integration/audit_output_online_test.go | 41 ++++++------------- tests/integration/audit_test.go | 34 ++++++++++++--- tests/integration/doctor_test.go | 16 ++++++-- tests/integration/gitignore_project_test.go | 12 +++--- tests/integration/install_online_test.go | 23 ++++++----- tests/integration/list_project_test.go | 15 +++---- tests/integration/sync_symlinked_dir_test.go | 11 ++--- tests/integration/uninstall_project_test.go | 12 +++--- tests/integration/uninstall_test.go | 13 +++--- tests/integration/uninstall_trash_test.go | 19 ++++----- 12 files changed, 141 insertions(+), 109 deletions(-) diff --git a/internal/audit/analyzer_metadata_test.go b/internal/audit/analyzer_metadata_test.go index 53ec1146..30c05729 100644 --- a/internal/audit/analyzer_metadata_test.go +++ b/internal/audit/analyzer_metadata_test.go @@ -246,19 +246,23 @@ func TestIsWellKnownOrg(t *testing.T) { } func TestMetadataAnalyzer_Integration(t *testing.T) { - // Create a temp skill directory with SKILL.md and .skillshare-meta.json - dir := t.TempDir() + // Create a nested skill directory: root/evil-skill/SKILL.md + // with centralized metadata at root/.metadata.json + root := t.TempDir() + dir := filepath.Join(root, "evil-skill") + os.MkdirAll(dir, 0755) - // Write SKILL.md claiming "from Acme Corp" skillContent := "---\nname: evil-skill\ndescription: Official formatter from Acme Corp\n---\n# Evil\n" if err := os.WriteFile(filepath.Join(dir, "SKILL.md"), []byte(skillContent), 0644); err != nil { t.Fatal(err) } - // Write meta pointing to a different org - meta := metaJSON{RepoURL: "https://github.com/evil-fork/skills.git"} - metaData, _ := json.Marshal(meta) - if err := os.WriteFile(filepath.Join(dir, ".skillshare-meta.json"), metaData, 0644); err != nil { + // Write centralized metadata in parent directory + store := metadataStoreJSON{Entries: map[string]metaJSON{ + "evil-skill": {RepoURL: "https://github.com/evil-fork/skills.git"}, + }} + storeData, _ := json.Marshal(store) + if err := os.WriteFile(filepath.Join(root, metadataFileName), storeData, 0644); err != nil { t.Fatal(err) } @@ -320,14 +324,18 @@ func TestMetadataAnalyzer_NoMeta(t *testing.T) { } func TestMetadataAnalyzer_DisabledRules(t *testing.T) { - dir := t.TempDir() + root := t.TempDir() + dir := filepath.Join(root, "test-skill") + os.MkdirAll(dir, 0755) if err := os.WriteFile(filepath.Join(dir, "SKILL.md"), []byte("---\nname: test\ndescription: Official tool from Acme Corp\n---\n"), 0644); err != nil { t.Fatal(err) } - meta := metaJSON{RepoURL: "https://github.com/evil-fork/skills.git"} - metaData, _ := json.Marshal(meta) - if err := os.WriteFile(filepath.Join(dir, ".skillshare-meta.json"), metaData, 0644); err != nil { + store := metadataStoreJSON{Entries: map[string]metaJSON{ + "test-skill": {RepoURL: "https://github.com/evil-fork/skills.git"}, + }} + storeData, _ := json.Marshal(store) + if err := os.WriteFile(filepath.Join(root, metadataFileName), storeData, 0644); err != nil { t.Fatal(err) } diff --git a/internal/audit/audit_scan_skill_test.go b/internal/audit/audit_scan_skill_test.go index cf565b4b..9d229529 100644 --- a/internal/audit/audit_scan_skill_test.go +++ b/internal/audit/audit_scan_skill_test.go @@ -731,19 +731,29 @@ func sha256hex(data []byte) string { } // helper: write meta with file_hashes -func writeMetaWithHashes(t *testing.T, dir string, hashes map[string]string) { +// writeMetaWithHashes writes file hashes into the centralized .metadata.json +// store in the parent directory of skillDir (matching the production layout). +func writeMetaWithHashes(t *testing.T, skillDir string, hashes map[string]string) { t.Helper() - meta := struct { + skillName := filepath.Base(skillDir) + parentDir := filepath.Dir(skillDir) + + type entry struct { Source string `json:"source"` Type string `json:"type"` FileHashes map[string]string `json:"file_hashes"` + } + store := struct { + Version int `json:"version"` + Entries map[string]entry `json:"entries"` }{ - Source: "test", - Type: "local", - FileHashes: hashes, + Version: 1, + Entries: map[string]entry{ + skillName: {Source: "test", Type: "local", FileHashes: hashes}, + }, } - data, _ := json.Marshal(meta) - os.WriteFile(filepath.Join(dir, ".skillshare-meta.json"), data, 0644) + data, _ := json.Marshal(store) + os.WriteFile(filepath.Join(parentDir, metadataFileName), data, 0644) } func TestScanSkill_ContentTampered(t *testing.T) { diff --git a/tests/integration/audit_output_online_test.go b/tests/integration/audit_output_online_test.go index f8585a1e..328a8078 100644 --- a/tests/integration/audit_output_online_test.go +++ b/tests/integration/audit_output_online_test.go @@ -3,12 +3,11 @@ package integration import ( - "encoding/json" - "os" "path/filepath" "strings" "testing" + "skillshare/internal/install" "skillshare/internal/testutil" ) @@ -83,39 +82,23 @@ func TestUpdateAll_AuditOutputParity_Antigravity(t *testing.T) { updateResult.AssertOutputNotContains(t, "Blocked / Rolled Back") } -// invalidateOneSkillMeta finds the first skill with a .skillshare-meta.json file -// and sets its "version" to a stale value, forcing the next update to re-install it. +// invalidateOneSkillMeta finds the first skill with metadata in the centralized +// store and sets its "version" to a stale value, forcing the next update to re-install it. func invalidateOneSkillMeta(t *testing.T, skillsDir string) { t.Helper() - entries, err := os.ReadDir(skillsDir) - if err != nil { - t.Fatalf("cannot read skills dir: %v", err) - } - - for _, e := range entries { - if !e.IsDir() { - continue - } - metaPath := filepath.Join(skillsDir, e.Name(), ".skillshare-meta.json") - data, err := os.ReadFile(metaPath) - if err != nil { + store := install.LoadMetadataOrNew(skillsDir) + for _, name := range store.List() { + entry := store.Get(name) + if entry == nil || entry.Source == "" { continue } - var meta map[string]any - if err := json.Unmarshal(data, &meta); err != nil { - continue - } - meta["version"] = "stale" - meta["tree_hash"] = "" // also clear tree hash so subdir fallback won't match - out, err := json.MarshalIndent(meta, "", " ") - if err != nil { - t.Fatalf("marshal meta: %v", err) - } - if err := os.WriteFile(metaPath, out, 0644); err != nil { - t.Fatalf("write meta: %v", err) + entry.Version = "stale" + entry.TreeHash = "" + if err := store.Save(skillsDir); err != nil { + t.Fatalf("save store: %v", err) } - t.Logf("invalidated metadata for skill %q to force re-install", e.Name()) + t.Logf("invalidated metadata for skill %q to force re-install", name) return } diff --git a/tests/integration/audit_test.go b/tests/integration/audit_test.go index c0332460..f344847e 100644 --- a/tests/integration/audit_test.go +++ b/tests/integration/audit_test.go @@ -13,6 +13,7 @@ import ( "strings" "testing" + "skillshare/internal/install" "skillshare/internal/testutil" ) @@ -875,19 +876,40 @@ func sha256Hex(data []byte) string { return hex.EncodeToString(h[:]) } -// writeMetaJSON writes a .skillshare-meta.json with the given file_hashes into dir. -func writeMetaJSON(t *testing.T, dir string, hashes map[string]string) { +// writeMetaJSON writes file_hashes for a skill into the centralized .metadata.json +// store in the parent directory of skillDir. +func writeMetaJSON(t *testing.T, skillDir string, hashes map[string]string) { t.Helper() - meta := map[string]any{ + skillName := filepath.Base(skillDir) + parentDir := filepath.Dir(skillDir) + + entry := map[string]any{ "source": "test", "type": "local", "installed_at": "2026-01-01T00:00:00Z", } if hashes != nil { - meta["file_hashes"] = hashes + entry["file_hashes"] = hashes + } + store := map[string]any{ + "version": 1, + "entries": map[string]any{skillName: entry}, + } + + // Merge with existing store if present + existingData, err := os.ReadFile(filepath.Join(parentDir, install.MetadataFileName)) + if err == nil { + var existing map[string]any + if json.Unmarshal(existingData, &existing) == nil { + if entries, ok := existing["entries"].(map[string]any); ok { + entries[skillName] = entry + store = existing + } + } } - data, _ := json.Marshal(meta) - if err := os.WriteFile(filepath.Join(dir, ".skillshare-meta.json"), data, 0644); err != nil { + + data, _ := json.Marshal(store) + if err := os.WriteFile(filepath.Join(parentDir, install.MetadataFileName), data, 0644); err != nil { t.Fatalf("writeMetaJSON: %v", err) } } diff --git a/tests/integration/doctor_test.go b/tests/integration/doctor_test.go index 42c2326f..85cbdfd2 100644 --- a/tests/integration/doctor_test.go +++ b/tests/integration/doctor_test.go @@ -10,6 +10,7 @@ import ( "strings" "testing" + "skillshare/internal/install" "skillshare/internal/testutil" ) @@ -19,9 +20,12 @@ func TestDoctor_AllGood_PassesAll(t *testing.T) { sb.CreateSkill("skill1", map[string]string{ "SKILL.md": "# Skill 1", - // Include meta with correct file hash so integrity check passes - ".skillshare-meta.json": `{"source":"test","type":"local","installed_at":"2026-01-01T00:00:00Z","file_hashes":{"SKILL.md":"sha256:c90671f17f3b99f87d8fe1a542ee2d6829d2b2cfb7684d298e44c7591d8b0712"}}`, }) + + // Write metadata to centralized store with correct file hash so integrity check passes + metaStore := `{"version":1,"entries":{"skill1":{"source":"test","type":"local","installed_at":"2026-01-01T00:00:00Z","file_hashes":{"SKILL.md":"sha256:c90671f17f3b99f87d8fe1a542ee2d6829d2b2cfb7684d298e44c7591d8b0712"}}}}` + os.WriteFile(filepath.Join(sb.SourcePath, install.MetadataFileName), []byte(metaStore), 0644) + targetPath := sb.CreateTarget("claude") // Initialize git and commit to avoid warnings @@ -551,9 +555,13 @@ func TestDoctor_JSON_AllGood(t *testing.T) { defer sb.Cleanup() sb.CreateSkill("skill1", map[string]string{ - "SKILL.md": "# Skill 1", - ".skillshare-meta.json": `{"source":"test","type":"local","installed_at":"2026-01-01T00:00:00Z","file_hashes":{"SKILL.md":"sha256:c90671f17f3b99f87d8fe1a542ee2d6829d2b2cfb7684d298e44c7591d8b0712"}}`, + "SKILL.md": "# Skill 1", }) + + // Write metadata to centralized store with correct file hash so integrity check passes + metaStore := `{"version":1,"entries":{"skill1":{"source":"test","type":"local","installed_at":"2026-01-01T00:00:00Z","file_hashes":{"SKILL.md":"sha256:c90671f17f3b99f87d8fe1a542ee2d6829d2b2cfb7684d298e44c7591d8b0712"}}}}` + os.WriteFile(filepath.Join(sb.SourcePath, install.MetadataFileName), []byte(metaStore), 0644) + targetPath := sb.CreateTarget("claude") // Initialize git and commit to avoid warnings diff --git a/tests/integration/gitignore_project_test.go b/tests/integration/gitignore_project_test.go index ab7dd0d4..fa7d382f 100644 --- a/tests/integration/gitignore_project_test.go +++ b/tests/integration/gitignore_project_test.go @@ -3,12 +3,12 @@ package integration import ( - "encoding/json" "os" "path/filepath" "strings" "testing" + "skillshare/internal/install" "skillshare/internal/testutil" ) @@ -37,13 +37,13 @@ func TestGitignoreProject_UninstallRemovesEntry(t *testing.T) { defer sb.Cleanup() projectRoot := sb.SetupProjectDir("claude") - // Create remote skill with meta - skillDir := sb.CreateProjectSkill(projectRoot, "removable", map[string]string{ + // Create remote skill with meta in centralized store + sb.CreateProjectSkill(projectRoot, "removable", map[string]string{ "SKILL.md": "# Removable", }) - meta := map[string]interface{}{"source": "org/removable", "type": "github"} - metaJSON, _ := json.Marshal(meta) - os.WriteFile(filepath.Join(skillDir, ".skillshare-meta.json"), metaJSON, 0644) + skillsDir := filepath.Join(projectRoot, ".skillshare", "skills") + metaStore := `{"version":1,"entries":{"removable":{"source":"org/removable","type":"github"}}}` + os.WriteFile(filepath.Join(skillsDir, install.MetadataFileName), []byte(metaStore), 0644) // Write gitignore with the entry sb.WriteFile(filepath.Join(projectRoot, ".skillshare", ".gitignore"), diff --git a/tests/integration/install_online_test.go b/tests/integration/install_online_test.go index 72a50ab2..a1afec19 100644 --- a/tests/integration/install_online_test.go +++ b/tests/integration/install_online_test.go @@ -5,9 +5,9 @@ package integration import ( "os" "path/filepath" - "strings" "testing" + "skillshare/internal/install" "skillshare/internal/testutil" ) @@ -102,16 +102,19 @@ targets: {} t.Fatalf("did not expect .git directory for subdir API install") } - metaPath := filepath.Join(skillDir, ".skillshare-meta.json") - metaRaw, err := os.ReadFile(metaPath) - if err != nil { - t.Fatalf("failed to read metadata: %v", err) + // Verify metadata in centralized .metadata.json store + store, storeErr := install.LoadMetadata(sb.SourcePath) + if storeErr != nil { + t.Fatalf("failed to load metadata store: %v", storeErr) } - meta := string(metaRaw) - if !strings.Contains(meta, "\"source\": \"github.com/majiayu000/claude-skill-registry/skills/documents/atlassian-search\"") { - t.Fatalf("expected metadata source to preserve subdir source, got: %s", meta) + entry := store.Get("atlassian-search") + if entry == nil { + t.Fatal("expected metadata entry for atlassian-search in centralized store") } - if !strings.Contains(meta, "\"subdir\": \"skills/documents/atlassian-search\"") { - t.Fatalf("expected metadata subdir to match install path, got: %s", meta) + if entry.Source != "github.com/majiayu000/claude-skill-registry/skills/documents/atlassian-search" { + t.Fatalf("expected source to preserve subdir, got: %s", entry.Source) + } + if entry.Subdir != "skills/documents/atlassian-search" { + t.Fatalf("expected subdir to match install path, got: %s", entry.Subdir) } } diff --git a/tests/integration/list_project_test.go b/tests/integration/list_project_test.go index 59a842dc..eb89092d 100644 --- a/tests/integration/list_project_test.go +++ b/tests/integration/list_project_test.go @@ -3,12 +3,12 @@ package integration import ( - "encoding/json" "os" "path/filepath" "strings" "testing" + "skillshare/internal/install" "skillshare/internal/testutil" ) @@ -22,16 +22,13 @@ func TestListProject_ShowsLocalAndRemote(t *testing.T) { "SKILL.md": "# Local", }) - // Remote skill (with meta) - skillDir := sb.CreateProjectSkill(projectRoot, "remote-skill", map[string]string{ + // Remote skill (with meta in centralized store) + sb.CreateProjectSkill(projectRoot, "remote-skill", map[string]string{ "SKILL.md": "# Remote", }) - meta := map[string]interface{}{ - "source": "someone/skills/remote-skill", - "type": "github", - } - metaJSON, _ := json.Marshal(meta) - os.WriteFile(filepath.Join(skillDir, ".skillshare-meta.json"), metaJSON, 0644) + skillsDir := filepath.Join(projectRoot, ".skillshare", "skills") + metaStore := `{"version":1,"entries":{"remote-skill":{"source":"someone/skills/remote-skill","type":"github"}}}` + os.WriteFile(filepath.Join(skillsDir, install.MetadataFileName), []byte(metaStore), 0644) result := sb.RunCLIInDir(projectRoot, "list", "-p") result.AssertSuccess(t) diff --git a/tests/integration/sync_symlinked_dir_test.go b/tests/integration/sync_symlinked_dir_test.go index 3d41057a..898283de 100644 --- a/tests/integration/sync_symlinked_dir_test.go +++ b/tests/integration/sync_symlinked_dir_test.go @@ -7,6 +7,7 @@ import ( "path/filepath" "testing" + "skillshare/internal/install" "skillshare/internal/testutil" ) @@ -334,8 +335,8 @@ func TestUpdateGroup_ExternalSymlinkRejected(t *testing.T) { os.MkdirAll(filepath.Join(externalDir, "victim"), 0755) os.WriteFile(filepath.Join(externalDir, "victim", "SKILL.md"), []byte("---\nname: victim\n---\n# Victim"), 0644) - os.WriteFile(filepath.Join(externalDir, "victim", ".skillshare-meta.json"), - []byte(`{"source":"github.com/example/victim","installed_at":"2025-01-01T00:00:00Z"}`), 0644) + os.WriteFile(filepath.Join(externalDir, install.MetadataFileName), + []byte(`{"version":1,"entries":{"victim":{"source":"github.com/example/victim","installed_at":"2025-01-01T00:00:00Z"}}}`), 0644) // Symlink a group inside source to the external location os.Symlink(externalDir, filepath.Join(sb.SourcePath, "evil-group")) @@ -356,13 +357,13 @@ func TestUpdateAll_SymlinkedSource(t *testing.T) { realSource := filepath.Join(sb.Root, "dotfiles", "skills") os.MkdirAll(realSource, 0755) - // Create a skill with metadata + // Create a skill with metadata in centralized store skillDir := filepath.Join(realSource, "remote-skill") os.MkdirAll(skillDir, 0755) os.WriteFile(filepath.Join(skillDir, "SKILL.md"), []byte("---\nname: remote-skill\n---\n# Remote"), 0644) - os.WriteFile(filepath.Join(skillDir, ".skillshare-meta.json"), - []byte(`{"source":"github.com/example/remote","installed_at":"2025-01-01T00:00:00Z"}`), 0644) + os.WriteFile(filepath.Join(realSource, install.MetadataFileName), + []byte(`{"version":1,"entries":{"remote-skill":{"source":"github.com/example/remote","installed_at":"2025-01-01T00:00:00Z"}}}`), 0644) os.RemoveAll(sb.SourcePath) if err := os.Symlink(realSource, sb.SourcePath); err != nil { diff --git a/tests/integration/uninstall_project_test.go b/tests/integration/uninstall_project_test.go index 0a805eb5..f6240cdb 100644 --- a/tests/integration/uninstall_project_test.go +++ b/tests/integration/uninstall_project_test.go @@ -3,7 +3,6 @@ package integration import ( - "encoding/json" "os" "path/filepath" "testing" @@ -47,13 +46,14 @@ func TestUninstallProject_UpdatesConfig(t *testing.T) { defer sb.Cleanup() projectRoot := sb.SetupProjectDir("claude") - // Create remote skill with meta - skillDir := sb.CreateProjectSkill(projectRoot, "remote", map[string]string{ + // Create remote skill with meta in centralized store + sb.CreateProjectSkill(projectRoot, "remote", map[string]string{ "SKILL.md": "# Remote", }) - meta := map[string]interface{}{"source": "org/skills/remote", "type": "github"} - metaJSON, _ := json.Marshal(meta) - os.WriteFile(filepath.Join(skillDir, ".skillshare-meta.json"), metaJSON, 0644) + skillsDir := filepath.Join(projectRoot, ".skillshare", "skills") + metaStore := install.NewMetadataStore() + metaStore.Set("remote", &install.MetadataEntry{Source: "org/skills/remote", Type: "github"}) + metaStore.Save(skillsDir) // Write config and registry with the skill sb.WriteProjectConfig(projectRoot, `targets: diff --git a/tests/integration/uninstall_test.go b/tests/integration/uninstall_test.go index ffb39d4b..2714b525 100644 --- a/tests/integration/uninstall_test.go +++ b/tests/integration/uninstall_test.go @@ -172,15 +172,16 @@ func TestUninstall_ShowsMetadata(t *testing.T) { sb := testutil.NewSandbox(t) defer sb.Cleanup() - // Create skill with metadata (simulating installed skill) + // Create skill with metadata in centralized store (simulating installed skill) sb.CreateSkill("meta-skill", map[string]string{ "SKILL.md": "# Meta Skill", - ".skillshare-meta.json": `{ - "source": "github.com/user/repo", - "type": "github", - "installed_at": "2024-01-15T10:30:00Z" -}`, }) + metaStore := install.NewMetadataStore() + metaStore.Set("meta-skill", &install.MetadataEntry{ + Source: "github.com/user/repo", + Type: "github", + }) + metaStore.Save(sb.SourcePath) sb.WriteConfig(`source: ` + sb.SourcePath + ` targets: {} diff --git a/tests/integration/uninstall_trash_test.go b/tests/integration/uninstall_trash_test.go index ab1ba639..d60861ae 100644 --- a/tests/integration/uninstall_trash_test.go +++ b/tests/integration/uninstall_trash_test.go @@ -8,6 +8,7 @@ import ( "strings" "testing" + "skillshare/internal/install" "skillshare/internal/testutil" ) @@ -52,12 +53,11 @@ func TestUninstall_WithMeta_PrintsReinstallHint(t *testing.T) { sb.CreateSkill("remote-skill", map[string]string{ "SKILL.md": "# Remote Skill", - ".skillshare-meta.json": `{ - "source": "github.com/user/skills/remote-skill", - "type": "github", - "installed_at": "2026-01-15T10:30:00Z" -}`, }) + // Write metadata to centralized store + metaStore := `{"version":1,"entries":{"remote-skill":{"source":"github.com/user/skills/remote-skill","type":"github","installed_at":"2026-01-15T10:30:00Z"}}}` + os.WriteFile(filepath.Join(sb.SourcePath, install.MetadataFileName), []byte(metaStore), 0644) + sb.WriteConfig(`source: ` + sb.SourcePath + ` targets: {} `) @@ -94,12 +94,11 @@ func TestUninstall_DryRun_ShowsTrashPreview(t *testing.T) { sb.CreateSkill("preview-skill", map[string]string{ "SKILL.md": "# Preview", - ".skillshare-meta.json": `{ - "source": "github.com/org/repo/preview-skill", - "type": "github", - "installed_at": "2026-01-15T10:30:00Z" -}`, }) + // Write metadata to centralized store + metaStore := `{"version":1,"entries":{"preview-skill":{"source":"github.com/org/repo/preview-skill","type":"github","installed_at":"2026-01-15T10:30:00Z"}}}` + os.WriteFile(filepath.Join(sb.SourcePath, install.MetadataFileName), []byte(metaStore), 0644) + sb.WriteConfig(`source: ` + sb.SourcePath + ` targets: {} `) From d16c4f38120fb6f023388dbf4f8e3395a1588736 Mon Sep 17 00:00:00 2001 From: Willie Date: Wed, 8 Apr 2026 18:06:20 +0800 Subject: [PATCH 097/205] fix: correct metadata store routing, corruption handling, and lookup depth Three bugs introduced during centralized metadata migration: 1. Batch agent installs via UI wrote metadata to skills/.metadata.json instead of agents/.metadata.json because handleInstallBatch hard-coded SourceDir to s.cfg.Source. Override SourceDir with agentsSource() when body.Kind is agent. 2. CheckAgents used LoadMetadataOrNew which silently converts parse failures into an empty store, making all tracked agents appear local and suppressing drift/update diagnostics. Switch to LoadMetadata and surface corruption as status=error with the parse error message. 3. findMetaEntry only climbed 3 parent directories when searching for .metadata.json, but --into supports arbitrary nesting depth. Raise the ceiling to 10 (root sentinel still terminates early). --- internal/audit/analyzer_metadata.go | 2 +- internal/audit/analyzer_metadata_test.go | 34 +++++++++++ internal/check/agent_check.go | 12 +++- internal/check/agent_check_test.go | 20 +++++++ internal/server/handler_install.go | 3 + internal/server/handler_install_test.go | 72 ++++++++++++++++++++++++ 6 files changed, 141 insertions(+), 2 deletions(-) create mode 100644 internal/server/handler_install_test.go diff --git a/internal/audit/analyzer_metadata.go b/internal/audit/analyzer_metadata.go index 016e0f39..f08d67bc 100644 --- a/internal/audit/analyzer_metadata.go +++ b/internal/audit/analyzer_metadata.go @@ -88,7 +88,7 @@ func findMetaEntry(skillPath string) *metaJSON { skillName := filepath.Base(skillPath) dir := filepath.Dir(skillPath) - for i := 0; i < 3 && dir != filepath.Dir(dir); i++ { + for i := 0; i < 10 && dir != filepath.Dir(dir); i++ { data, err := os.ReadFile(filepath.Join(dir, metadataFileName)) if err == nil { var store metadataStoreJSON diff --git a/internal/audit/analyzer_metadata_test.go b/internal/audit/analyzer_metadata_test.go index 30c05729..f2615976 100644 --- a/internal/audit/analyzer_metadata_test.go +++ b/internal/audit/analyzer_metadata_test.go @@ -302,6 +302,40 @@ func TestMetadataAnalyzer_Integration(t *testing.T) { } } +func TestMetadataAnalyzer_Integration_DeepNestedSkill(t *testing.T) { + root := t.TempDir() + dir := filepath.Join(root, "a", "b", "c", "d", "evil-skill") + if err := os.MkdirAll(dir, 0755); err != nil { + t.Fatal(err) + } + + skillContent := "---\nname: evil-skill\ndescription: Official formatter from Acme Corp\n---\n# Evil\n" + if err := os.WriteFile(filepath.Join(dir, "SKILL.md"), []byte(skillContent), 0644); err != nil { + t.Fatal(err) + } + + store := metadataStoreJSON{Entries: map[string]metaJSON{ + "a/b/c/d/evil-skill": {RepoURL: "https://github.com/evil-fork/skills.git"}, + }} + storeData, _ := json.Marshal(store) + if err := os.WriteFile(filepath.Join(root, metadataFileName), storeData, 0644); err != nil { + t.Fatal(err) + } + + a := &metadataAnalyzer{} + ctx := &AnalyzeContext{ + SkillPath: dir, + DisabledIDs: map[string]bool{}, + } + findings, err := a.Analyze(ctx) + if err != nil { + t.Fatal(err) + } + if len(findings) < 2 { + t.Fatalf("expected deep nested skill to resolve metadata, got %d findings", len(findings)) + } +} + func TestMetadataAnalyzer_NoMeta(t *testing.T) { // Skill without .skillshare-meta.json — should produce no findings dir := t.TempDir() diff --git a/internal/check/agent_check.go b/internal/check/agent_check.go index 4bdccbe2..1b530f36 100644 --- a/internal/check/agent_check.go +++ b/internal/check/agent_check.go @@ -28,10 +28,20 @@ func CheckAgents(agentsDir string) []AgentCheckResult { } // Load centralized metadata store (auto-migrates any lingering sidecars). - store := install.LoadMetadataOrNew(agentsDir) + store, loadErr := install.LoadMetadata(agentsDir) var results []AgentCheckResult for _, d := range discovered { + if loadErr != nil { + // Surface corruption instead of silently treating all agents as local. + key := d.RelPath[:len(d.RelPath)-len(".md")] + results = append(results, AgentCheckResult{ + Name: key, + Status: "error", + Message: "invalid metadata: " + loadErr.Error(), + }) + continue + } result := checkOneAgent(store, d.SourcePath, d.RelPath) results = append(results, result) } diff --git a/internal/check/agent_check_test.go b/internal/check/agent_check_test.go index faffaffd..6ddf34c4 100644 --- a/internal/check/agent_check_test.go +++ b/internal/check/agent_check_test.go @@ -80,6 +80,26 @@ func TestCheckAgents_Drifted(t *testing.T) { } } +func TestCheckAgents_InvalidCentralizedMetadata(t *testing.T) { + dir := t.TempDir() + os.WriteFile(filepath.Join(dir, "tutor.md"), []byte("# Tutor"), 0644) + os.WriteFile(filepath.Join(dir, install.MetadataFileName), []byte("{invalid"), 0644) + + results := CheckAgents(dir) + if len(results) != 1 { + t.Fatalf("expected 1 result, got %d", len(results)) + } + if results[0].Name != "tutor" { + t.Errorf("Name = %q, want %q", results[0].Name, "tutor") + } + if results[0].Status != "error" { + t.Errorf("Status = %q, want %q", results[0].Status, "error") + } + if results[0].Message == "" { + t.Fatal("expected error message for invalid centralized metadata") + } +} + func TestCheckAgents_NonExistentDir(t *testing.T) { results := CheckAgents("/nonexistent/path") if results != nil { diff --git a/internal/server/handler_install.go b/internal/server/handler_install.go index 0c247c9a..b76416a6 100644 --- a/internal/server/handler_install.go +++ b/internal/server/handler_install.go @@ -157,6 +157,9 @@ func (s *Server) handleInstallBatch(w http.ResponseWriter, r *http.Request) { installOpts.AuditProjectRoot = s.projectRoot } isAgent := body.Kind == "agent" + if isAgent { + installOpts.SourceDir = s.agentsSource() + } for _, sel := range body.Skills { skillName := sel.Name diff --git a/internal/server/handler_install_test.go b/internal/server/handler_install_test.go new file mode 100644 index 00000000..4f265404 --- /dev/null +++ b/internal/server/handler_install_test.go @@ -0,0 +1,72 @@ +package server + +import ( + "bytes" + "encoding/json" + "net/http" + "net/http/httptest" + "os" + "os/exec" + "path/filepath" + "testing" + + "skillshare/internal/install" +) + +func TestHandleInstallBatch_AgentInstallWritesMetadataToAgentsSource(t *testing.T) { + s, skillsDir := newTestServer(t) + + agentsDir := filepath.Join(t.TempDir(), "agents") + if err := os.MkdirAll(agentsDir, 0755); err != nil { + t.Fatalf("failed to create agents dir: %v", err) + } + s.cfg.AgentsSource = agentsDir + s.agentsStore = install.NewMetadataStore() + + repoDir := t.TempDir() + initGitRepo(t, repoDir) + + agentPath := filepath.Join(repoDir, "reviewer.md") + if err := os.WriteFile(agentPath, []byte("# Reviewer agent"), 0644); err != nil { + t.Fatalf("failed to write agent file: %v", err) + } + for _, args := range [][]string{ + {"add", "reviewer.md"}, + {"commit", "-m", "add reviewer agent"}, + } { + cmd := exec.Command("git", args...) + cmd.Dir = repoDir + if out, err := cmd.CombinedOutput(); err != nil { + t.Fatalf("git %v failed: %s %v", args, out, err) + } + } + + payload, err := json.Marshal(map[string]any{ + "source": "file://" + repoDir, + "skills": []map[string]string{ + {"name": "reviewer", "path": "reviewer.md"}, + }, + "kind": "agent", + }) + if err != nil { + t.Fatalf("failed to marshal payload: %v", err) + } + + req := httptest.NewRequest(http.MethodPost, "/api/install/batch", bytes.NewReader(payload)) + rr := httptest.NewRecorder() + s.mux.ServeHTTP(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("unexpected status: got %d, body=%s", rr.Code, rr.Body.String()) + } + + if _, err := os.Stat(filepath.Join(agentsDir, "reviewer.md")); err != nil { + t.Fatalf("expected installed agent in agents source: %v", err) + } + if _, err := os.Stat(filepath.Join(agentsDir, install.MetadataFileName)); err != nil { + t.Fatalf("expected metadata written to agents source: %v", err) + } + if _, err := os.Stat(filepath.Join(skillsDir, install.MetadataFileName)); !os.IsNotExist(err) { + t.Fatalf("expected no agent metadata written to skills source, got err=%v", err) + } +} From 585b3cdde708cc05cbb8c987b3c49099e79fd52b Mon Sep 17 00:00:00 2001 From: Willie Date: Wed, 8 Apr 2026 18:35:17 +0800 Subject: [PATCH 098/205] feat(tui): add [S]/[A] kind prefix in All tab and group by kind MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit In the All (resources) tab: - Show [S] prefix for skills and [A] prefix for agents - Group items by kind: skills first, agents second - Each kind block preserves original repo grouping with kind-prefixed headers (e.g. 'Skills · standalone') In Skills/Agents tabs: no prefix shown (context is clear). Detail panel now shows 'Skill' or 'Agent' kind label in the status bits alongside remote/local/tracked. Spinner message is now tab-aware: 'Loading resources/skills/agents'. Implementation uses a shared *listTab pointer between the model and delegate so the delegate reads current tab state without needing SetDelegate() on every tab switch. --- cmd/skillshare/list_tui.go | 54 ++++++++++++++++++++++++--------- cmd/skillshare/list_tui_item.go | 47 +++++++++++++++++++--------- 2 files changed, 73 insertions(+), 28 deletions(-) diff --git a/cmd/skillshare/list_tui.go b/cmd/skillshare/list_tui.go index 263d894f..918c0b9b 100644 --- a/cmd/skillshare/list_tui.go +++ b/cmd/skillshare/list_tui.go @@ -89,6 +89,7 @@ type listTUIModel struct { // Tab filter — pre-filters allItems by kind (All / Skills / Agents) activeTab listTab // currently selected tab + activeTabP *listTab // shared pointer for delegate to read current tab tabCounts [3]int // cached counts: [all, skills, agents] tabFiltered []skillItem // cached result of tabFilteredItems(); set by applyFilter() @@ -124,7 +125,21 @@ type listTUIModel struct { // When loadFn is non-nil, skills are loaded asynchronously inside the TUI (spinner shown). // When loadFn is nil, skills/totalCount are used directly (pre-loaded). func newListTUIModel(loadFn listLoadFn, skills []skillItem, totalCount int, modeLabel, sourcePath, agentsSourcePath string, targets map[string]config.TargetConfig, initialKind resourceKindFilter) listTUIModel { - delegate := listSkillDelegate{} + // Map CLI kind filter to initial tab + var initTab listTab + switch initialKind { + case kindAgents: + initTab = listTabAgents + case kindSkills: + initTab = listTabSkills + default: + initTab = listTabAll + } + + // Shared pointer lets the delegate read the current tab without re-creation. + tabPtr := new(listTab) + *tabPtr = initTab + delegate := listSkillDelegate{activeTab: tabPtr} // Build initial item set (empty if async loading) var items []list.Item @@ -156,17 +171,6 @@ func newListTUIModel(loadFn listLoadFn, skills []skillItem, totalCount int, mode fi.Cursor.Style = tc.Filter fi.Placeholder = "filter or t:tracked g:group r:repo k:kind" - // Map CLI kind filter to initial tab - var initTab listTab - switch initialKind { - case kindAgents: - initTab = listTabAgents - case kindSkills: - initTab = listTabSkills - default: - initTab = listTabAll - } - m := listTUIModel{ list: l, totalCount: totalCount, @@ -175,6 +179,7 @@ func newListTUIModel(loadFn listLoadFn, skills []skillItem, totalCount int, mode agentsSourcePath: agentsSourcePath, targets: targets, activeTab: initTab, + activeTabP: tabPtr, detailCache: make(map[string]*detailData), loading: loadFn != nil, loadSpinner: sp, @@ -206,9 +211,20 @@ func (m *listTUIModel) recomputeTabCounts() { } // tabFilteredItems returns the subset of allItems matching the active tab. +// For listTabAll, items are reordered so skills come first then agents, +// keeping each kind's original order intact. func (m *listTUIModel) tabFilteredItems() []skillItem { if m.activeTab == listTabAll { - return m.allItems + skills := make([]skillItem, 0, m.tabCounts[1]) + agents := make([]skillItem, 0, m.tabCounts[2]) + for _, item := range m.allItems { + if item.entry.Kind == "agent" { + agents = append(agents, item) + } else { + skills = append(skills, item) + } + } + return append(skills, agents...) } wantAgent := m.activeTab == listTabAgents cap := m.tabCounts[1] @@ -399,12 +415,14 @@ func (m listTUIModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) { return m, tea.Quit case "tab": m.activeTab = (m.activeTab + 1) % 3 + *m.activeTabP = m.activeTab m.applyFilter() m.updateTitle() skipGroupItem(&m.list, 1) return m, nil case "shift+tab": m.activeTab = (m.activeTab - 1 + 3) % 3 + *m.activeTabP = m.activeTab m.applyFilter() m.updateTitle() skipGroupItem(&m.list, 1) @@ -546,7 +564,7 @@ func (m listTUIModel) View() string { // Loading state — spinner + message if m.loading { - return fmt.Sprintf("\n %s Loading skills...\n", m.loadSpinner.View()) + return fmt.Sprintf("\n %s Loading %s...\n", m.loadSpinner.View(), m.activeTab.noun()) } // Content viewer — dual-pane @@ -1005,6 +1023,14 @@ func renderDetailParagraph(lines []string) []string { func detailStatusBits(e skillEntry) string { var bits []string + + // Kind label (Agent / Skill) + if e.Kind == "agent" { + bits = append(bits, tc.Cyan.Render("Agent")) + } else { + bits = append(bits, tc.Cyan.Render("Skill")) + } + switch { case e.RepoName != "": bits = append(bits, tc.Green.Render("tracked")) diff --git a/cmd/skillshare/list_tui_item.go b/cmd/skillshare/list_tui_item.go index e4be8993..2f2d7dae 100644 --- a/cmd/skillshare/list_tui_item.go +++ b/cmd/skillshare/list_tui_item.go @@ -27,7 +27,10 @@ func (g groupItem) Title() string { return g.label } func (g groupItem) Description() string { return "" } // listSkillDelegate renders a compact single-line browser row for the list TUI. -type listSkillDelegate struct{} +// activeTab is a shared pointer so the delegate sees tab changes without re-creation. +type listSkillDelegate struct { + activeTab *listTab // nil-safe: treat nil as listTabAll +} func (listSkillDelegate) Height() int { return 1 } func (listSkillDelegate) Spacing() int { return 0 } @@ -35,7 +38,7 @@ func (listSkillDelegate) Update(_ tea.Msg, _ *list.Model) tea.Cmd { return nil } -func (listSkillDelegate) Render(w io.Writer, m list.Model, index int, item list.Item) { +func (d listSkillDelegate) Render(w io.Writer, m list.Model, index int, item list.Item) { width := m.Width() if width <= 0 { width = 40 @@ -46,7 +49,8 @@ func (listSkillDelegate) Render(w io.Writer, m list.Model, index int, item list. renderGroupRow(w, v, width) case skillItem: selected := index == m.Index() - renderSkillRow(w, v, width, selected) + allTab := d.activeTab != nil && *d.activeTab == listTabAll + renderSkillRow(w, v, width, selected, allTab) } } @@ -66,8 +70,8 @@ func renderGroupRow(w io.Writer, g groupItem, width int) { fmt.Fprint(w, tc.Dim.Render("─ ")+label+" "+tc.Dim.Render(line)) } -func renderSkillRow(w io.Writer, skill skillItem, width int, selected bool) { - renderPrefixRow(w, skillTitleLine(skill.entry), width, selected) +func renderSkillRow(w io.Writer, skill skillItem, width int, selected bool, allTab bool) { + renderPrefixRow(w, skillTitleLine(skill.entry, allTab), width, selected) } // renderPrefixRow renders a single-line list row with a "▌" prefix bar. @@ -201,14 +205,18 @@ func (i skillItem) Description() string { return "" } -func skillTitleLine(e skillEntry) string { +func skillTitleLine(e skillEntry, allTab bool) string { if e.Disabled { // Disabled: dim the entire name + ⊘ prefix return tc.Dim.Render("⊘ " + compactSkillPath(e)) } var prefix string - if e.Kind == "agent" { - prefix = tc.Cyan.Render("[A]") + " " + if allTab { + if e.Kind == "agent" { + prefix = tc.Cyan.Render("[A]") + " " + } else { + prefix = tc.Cyan.Render("[S]") + " " + } } title := prefix + colorSkillPath(compactSkillPath(e)) if badge := skillTypeBadge(e); badge != "" { @@ -325,13 +333,16 @@ func toSkillItems(entries []skillEntry) []skillItem { // buildGroupedItems inserts groupItem separators before each repo/local group. // Skills must be sorted by RelPath (tracked repos with "_" prefix sort first). // If all skills belong to a single group (e.g. all standalone), no separators are added. +// When items contain mixed kinds (skills + agents), groups are keyed by kind+repo +// so skills and agents stay in separate blocks. func buildGroupedItems(skills []skillItem) []list.Item { // Check if there are multiple groups. groups := map[string]bool{} + hasMultiKinds := false for _, s := range skills { - groups[s.entry.RepoName] = true - if len(groups) > 1 { - break + groups[s.entry.Kind+"\x00"+s.entry.RepoName] = true + if !hasMultiKinds && len(skills) > 0 && s.entry.Kind != skills[0].entry.Kind { + hasMultiKinds = true } } @@ -361,12 +372,20 @@ func buildGroupedItems(skills []skillItem) []list.Item { } for _, s := range skills { - key := s.entry.RepoName // "" for local + key := s.entry.Kind + "\x00" + s.entry.RepoName if key != currentGroup { flush() label := "standalone" - if key != "" { - label = strings.TrimPrefix(key, "_") + if s.entry.RepoName != "" { + label = strings.TrimPrefix(s.entry.RepoName, "_") + } + // Prefix with kind when mixed to visually separate skills/agents + if hasMultiKinds { + kindPrefix := "Skills" + if s.entry.Kind == "agent" { + kindPrefix = "Agents" + } + label = kindPrefix + " · " + label } items = append(items, groupItem{label: label}) currentGroup = key From 27f352e5d21138e29491f66adcd6531178552237 Mon Sep 17 00:00:00 2001 From: Willie Date: Wed, 8 Apr 2026 18:35:36 +0800 Subject: [PATCH 099/205] fix: resolve relative paths in project mode extras and agent targets Extract resolveProjectPath() helper to centralize ~ expansion and relative-to-project-root resolution. Fixes extras sync and agent target path resolution in project mode when paths are relative. Add integration tests for sync --all agents/extras overlap detection in both global and project modes. --- cmd/skillshare/project_paths.go | 22 +++++++ cmd/skillshare/status_project.go | 7 +- cmd/skillshare/sync_extras.go | 8 +-- tests/integration/sync_extras_test.go | 92 +++++++++++++++++++++++++++ 4 files changed, 120 insertions(+), 9 deletions(-) create mode 100644 cmd/skillshare/project_paths.go diff --git a/cmd/skillshare/project_paths.go b/cmd/skillshare/project_paths.go new file mode 100644 index 00000000..1fadef7b --- /dev/null +++ b/cmd/skillshare/project_paths.go @@ -0,0 +1,22 @@ +package main + +import ( + "path/filepath" + + "skillshare/internal/config" +) + +// resolveProjectPath expands ~ and resolves relative project paths against +// the project root so all project-mode comparisons use a single path form. +func resolveProjectPath(projectRoot, path string) string { + if path == "" { + return "" + } + + resolved := config.ExpandPath(path) + if !filepath.IsAbs(resolved) { + return filepath.Join(projectRoot, filepath.FromSlash(resolved)) + } + + return resolved +} diff --git a/cmd/skillshare/status_project.go b/cmd/skillshare/status_project.go index c91c07dc..ec857b7f 100644 --- a/cmd/skillshare/status_project.go +++ b/cmd/skillshare/status_project.go @@ -199,13 +199,10 @@ func buildProjectAgentStatusJSON(rt *projectRuntime) *statusJSONAgents { func resolveProjectAgentTargetPath(entry config.ProjectTargetEntry, builtinAgents map[string]config.TargetConfig, projectRoot string) string { ac := entry.AgentsConfig() if ac.Path != "" { - if filepath.IsAbs(ac.Path) { - return config.ExpandPath(ac.Path) - } - return filepath.Join(projectRoot, ac.Path) + return resolveProjectPath(projectRoot, ac.Path) } if builtin, ok := builtinAgents[entry.Name]; ok { - return config.ExpandPath(builtin.Path) + return resolveProjectPath(projectRoot, builtin.Path) } return "" } diff --git a/cmd/skillshare/sync_extras.go b/cmd/skillshare/sync_extras.go index d41f64cc..f7084e85 100644 --- a/cmd/skillshare/sync_extras.go +++ b/cmd/skillshare/sync_extras.go @@ -319,10 +319,7 @@ func cmdSyncExtrasProject(cwd string, dryRun, force, jsonOutput bool, start time } // Expand ~ and resolve relative paths against project root - targetPath := config.ExpandPath(target.Path) - if !filepath.IsAbs(targetPath) { - targetPath = filepath.Join(cwd, targetPath) - } + targetPath := resolveProjectPath(cwd, target.Path) // Skip extras "agents" targets that overlap with the agents sync system if extra.Name == extrasAgentsName && isExtrasTargetOverlappingAgents(targetPath, agentTargetPaths) { @@ -465,6 +462,9 @@ func runExtrasSyncEntries(extras []config.ExtraConfig, sourceFunc func(config.Ex mode = "merge" } targetPath := config.ExpandPath(target.Path) + if projectRoot != "" { + targetPath = resolveProjectPath(projectRoot, target.Path) + } if extra.Name == extrasAgentsName && isExtrasTargetOverlappingAgents(targetPath, agentTargetPaths) { entry.Targets = append(entry.Targets, syncExtrasJSONTarget{ diff --git a/tests/integration/sync_extras_test.go b/tests/integration/sync_extras_test.go index 7ea59004..cbddfb89 100644 --- a/tests/integration/sync_extras_test.go +++ b/tests/integration/sync_extras_test.go @@ -749,3 +749,95 @@ extras: t.Error("extra-agent.md should be synced to non-overlapping target") } } + +func TestSyncAll_AgentsExtrasOverlap_WarnsAndPreservesAgents_Global(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + sb.CreateSkill("placeholder", map[string]string{ + "SKILL.md": "# Placeholder", + }) + targetPath := sb.CreateTarget("claude") + + agentsDir := filepath.Join(filepath.Dir(sb.SourcePath), "agents") + os.MkdirAll(agentsDir, 0755) + os.WriteFile(filepath.Join(agentsDir, "demo.md"), []byte("# Demo Agent"), 0644) + + sourceRoot := filepath.Dir(sb.SourcePath) + extrasAgentsSource := filepath.Join(sourceRoot, "extras", "agents") + os.MkdirAll(extrasAgentsSource, 0755) + os.WriteFile(filepath.Join(extrasAgentsSource, "extra-agent.md"), []byte("# Extra Agent"), 0644) + + claudeAgents := filepath.Join(sb.Home, ".claude", "agents") + os.MkdirAll(claudeAgents, 0755) + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + path: ` + targetPath + ` +extras: + - name: agents + targets: + - path: ` + claudeAgents + ` +`) + + result := sb.RunCLI("sync", "--all") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "Skipping extras") + result.AssertAnyOutputContains(t, "already managed by agents sync") + + if !sb.IsSymlink(filepath.Join(claudeAgents, "demo.md")) { + t.Error("demo agent should remain synced in global mode") + } + if sb.FileExists(filepath.Join(claudeAgents, "extra-agent.md")) { + t.Error("extras agent file should not be synced when target overlaps agents sync") + } +} + +func TestSyncAll_AgentsExtrasOverlap_WarnsAndPreservesAgents_Project(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + projectRoot := sb.SetupProjectDir("claude", "cursor") + + projectAgents := filepath.Join(projectRoot, ".skillshare", "agents") + os.MkdirAll(projectAgents, 0755) + os.WriteFile(filepath.Join(projectAgents, "demo.md"), []byte("# Demo Agent"), 0644) + + projectExtrasAgents := filepath.Join(projectRoot, ".skillshare", "extras", "agents") + os.MkdirAll(projectExtrasAgents, 0755) + os.WriteFile(filepath.Join(projectExtrasAgents, "extra-agent.md"), []byte("# Extra Agent"), 0644) + + claudeAgents := filepath.Join(projectRoot, ".claude", "agents") + cursorAgents := filepath.Join(projectRoot, ".cursor", "agents") + os.MkdirAll(claudeAgents, 0755) + os.MkdirAll(cursorAgents, 0755) + + sb.WriteProjectConfig(projectRoot, `targets: + - claude + - cursor +extras: + - name: agents + targets: + - path: .claude/agents + - path: .cursor/agents +`) + + result := sb.RunCLIInDir(projectRoot, "sync", "--all", "-p") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "Skipping extras") + result.AssertAnyOutputContains(t, "already managed by agents sync") + + if !sb.IsSymlink(filepath.Join(claudeAgents, "demo.md")) { + t.Error("claude demo agent should remain synced in project mode") + } + if !sb.IsSymlink(filepath.Join(cursorAgents, "demo.md")) { + t.Error("cursor demo agent should remain synced in project mode") + } + if sb.FileExists(filepath.Join(claudeAgents, "extra-agent.md")) { + t.Error("project extras agent file should not be synced to claude when target overlaps agents sync") + } + if sb.FileExists(filepath.Join(cursorAgents, "extra-agent.md")) { + t.Error("project extras agent file should not be synced to cursor when target overlaps agents sync") + } +} From 43c5154a78acb721901fa38fc791e004aee682aa Mon Sep 17 00:00:00 2001 From: Willie Date: Wed, 8 Apr 2026 20:52:29 +0800 Subject: [PATCH 100/205] Flatten nested agent sync names Align agent target naming with the skills-style __ flattening so nested agents do not collide by basename in flat agent directories. Add unit and project-mode integration coverage to verify repeated syncs stay stable and no longer report false updates. --- cmd/skillshare/trash_tui.go | 79 +++++++++-- cmd/skillshare/trash_tui_test.go | 46 +++++++ internal/resource/agent.go | 10 +- internal/resource/kind_test.go | 8 +- internal/server/handler_trash.go | 138 ++++++++++++++++--- internal/server/handler_trash_test.go | 63 ++++++++- internal/server/server.go | 9 ++ internal/sync/agent_sync_test.go | 73 +++++++++- internal/trash/trash.go | 32 ++++- internal/trash/trash_test.go | 27 ++++ tests/integration/agent_crud_test.go | 45 ++++++ tests/integration/agent_list_sync_test.go | 6 +- tests/integration/agent_project_mode_test.go | 73 ++++++++++ ui/src/api/client.ts | 21 ++- ui/src/pages/TrashPage.tsx | 50 +++---- 15 files changed, 600 insertions(+), 80 deletions(-) create mode 100644 cmd/skillshare/trash_tui_test.go diff --git a/cmd/skillshare/trash_tui.go b/cmd/skillshare/trash_tui.go index 6e1dc6e4..998d9b1e 100644 --- a/cmd/skillshare/trash_tui.go +++ b/cmd/skillshare/trash_tui.go @@ -37,6 +37,8 @@ func (i trashItem) Title() string { var kindBadge string if i.entry.Kind == "agent" { kindBadge = tc.Cyan.Render("[A]") + " " + } else { + kindBadge = tc.Cyan.Render("[S]") + " " } age := formatAge(time.Since(i.entry.Date)) size := formatBytes(i.entry.Size) @@ -698,7 +700,8 @@ func (m trashTUIModel) viewConfirm() string { verb := m.confirmAction switch verb { case "restore": - b.WriteString(fmt.Sprintf(" Restore %d item(s) to %s?\n\n", len(m.confirmNames), m.destDir)) + b.WriteString(m.renderRestoreConfirmHeader()) + b.WriteString("\n") case "delete": b.WriteString(" ") b.WriteString(tc.Red.Render(fmt.Sprintf("Permanently delete %d item(s)?", len(m.confirmNames)))) @@ -728,6 +731,32 @@ func (m trashTUIModel) viewConfirm() string { return b.String() } +func (m trashTUIModel) renderRestoreConfirmHeader() string { + var hasSkills, hasAgents bool + for _, entry := range m.selectedEntries() { + switch entry.Kind { + case "agent": + hasAgents = true + default: + hasSkills = true + } + } + + switch { + case hasSkills && hasAgents: + return fmt.Sprintf( + " Restore %d item(s)?\n\n skills -> %s\n agents -> %s\n", + len(m.confirmNames), + m.destDir, + m.agentDestDir, + ) + case hasAgents: + return fmt.Sprintf(" Restore %d item(s) to %s?\n", len(m.confirmNames), m.agentDestDir) + default: + return fmt.Sprintf(" Restore %d item(s) to %s?\n", len(m.confirmNames), m.destDir) + } +} + // --------------------------------------------------------------------------- // Rendering helpers // --------------------------------------------------------------------------- @@ -788,6 +817,11 @@ func (m trashTUIModel) renderTrashDetailPanel(entry trash.TrashEntry, width int) b.WriteString("\n") } + if entry.Kind == "agent" { + row("Type", tc.Cyan.Render("Agent")) + } else { + row("Type", tc.Cyan.Render("Skill")) + } row("Trashed", entry.Date.Format("2006-01-02 15:04:05")) row("Age", formatAge(time.Since(entry.Date))+" ago") row("Size", formatBytes(entry.Size)) @@ -800,21 +834,38 @@ func (m trashTUIModel) renderTrashDetailPanel(entry trash.TrashEntry, width int) } row("Path", pathStr) - // SKILL.md preview — read first 15 lines - skillMD := filepath.Join(entry.Path, "SKILL.md") - if data, err := os.ReadFile(skillMD); err == nil { - lines := strings.SplitN(string(data), "\n", 16) - if len(lines) > 15 { - lines = lines[:15] + // Content preview — SKILL.md for skills, agent .md file for agents + var previewFile, previewTitle string + if entry.Kind == "agent" { + // Find the .md file inside the trash directory + if entries, readErr := os.ReadDir(entry.Path); readErr == nil { + for _, e := range entries { + if !e.IsDir() && strings.HasSuffix(e.Name(), ".md") { + previewFile = filepath.Join(entry.Path, e.Name()) + previewTitle = e.Name() + break + } + } } - preview := strings.TrimRight(strings.Join(lines, "\n"), "\n") - if preview != "" { - b.WriteString("\n") - b.WriteString(tc.Title.Render("SKILL.md")) - b.WriteString("\n") - for _, line := range strings.Split(preview, "\n") { - b.WriteString(tc.Dim.Render(line)) + } else { + previewFile = filepath.Join(entry.Path, "SKILL.md") + previewTitle = "SKILL.md" + } + if previewFile != "" { + if data, err := os.ReadFile(previewFile); err == nil { + lines := strings.SplitN(string(data), "\n", 16) + if len(lines) > 15 { + lines = lines[:15] + } + preview := strings.TrimRight(strings.Join(lines, "\n"), "\n") + if preview != "" { b.WriteString("\n") + b.WriteString(tc.Title.Render(previewTitle)) + b.WriteString("\n") + for _, line := range strings.Split(preview, "\n") { + b.WriteString(tc.Dim.Render(line)) + b.WriteString("\n") + } } } } diff --git a/cmd/skillshare/trash_tui_test.go b/cmd/skillshare/trash_tui_test.go new file mode 100644 index 00000000..68887971 --- /dev/null +++ b/cmd/skillshare/trash_tui_test.go @@ -0,0 +1,46 @@ +package main + +import ( + "strings" + "testing" + + "skillshare/internal/trash" +) + +func TestTrashTUIRenderRestoreConfirmHeader_UsesAgentDestination(t *testing.T) { + model := newTrashTUIModel([]trash.TrashEntry{ + {Name: "tutor", Kind: "agent"}, + }, "", "", "/tmp/skills", "/tmp/agents", "", "global") + model.selected[0] = true + model.selCount = 1 + model.confirmAction = "restore" + model.confirmNames = []string{"tutor"} + + got := model.renderRestoreConfirmHeader() + if !strings.Contains(got, "/tmp/agents") { + t.Fatalf("expected agent restore header to use agent destination, got %q", got) + } + if strings.Contains(got, "/tmp/skills") { + t.Fatalf("expected agent restore header to avoid skill destination, got %q", got) + } +} + +func TestTrashTUIRenderRestoreConfirmHeader_ShowsMixedDestinations(t *testing.T) { + model := newTrashTUIModel([]trash.TrashEntry{ + {Name: "demo-skill", Kind: "skill"}, + {Name: "tutor", Kind: "agent"}, + }, "", "", "/tmp/skills", "/tmp/agents", "", "global") + model.selected[0] = true + model.selected[1] = true + model.selCount = 2 + model.confirmAction = "restore" + model.confirmNames = []string{"demo-skill", "tutor"} + + got := model.renderRestoreConfirmHeader() + if !strings.Contains(got, "skills -> /tmp/skills") { + t.Fatalf("expected mixed restore header to mention skills destination, got %q", got) + } + if !strings.Contains(got, "agents -> /tmp/agents") { + t.Fatalf("expected mixed restore header to mention agent destination, got %q", got) + } +} diff --git a/internal/resource/agent.go b/internal/resource/agent.go index 1a217d87..5dfe5c2d 100644 --- a/internal/resource/agent.go +++ b/internal/resource/agent.go @@ -113,17 +113,17 @@ func (AgentKind) ResolveName(path string) string { return agentNameFromFile(path, filepath.Base(path)) } -// FlatName strips directory prefixes, keeping only the filename. -// Example: "curriculum/math-tutor.md" → "math-tutor.md" +// FlatName flattens nested agent paths using the shared __ separator. +// Example: "curriculum/math-tutor.md" → "curriculum__math-tutor.md" func (AgentKind) FlatName(relPath string) string { return AgentFlatName(relPath) } // AgentFlatName is the standalone flat name computation for agents. -// Strips directory prefixes, keeping only the filename. +// Agents must sync into flat target directories, so nested segments are +// encoded using the same path flattening rule as skills. func AgentFlatName(relPath string) string { - relPath = strings.ReplaceAll(relPath, "\\", "/") - return filepath.Base(relPath) + return utils.PathToFlatName(relPath) } // ActiveAgents returns only non-disabled agents from the given slice. diff --git a/internal/resource/kind_test.go b/internal/resource/kind_test.go index 961126d5..b031b4ee 100644 --- a/internal/resource/kind_test.go +++ b/internal/resource/kind_test.go @@ -220,8 +220,8 @@ func TestAgentKind_Discover_Nested(t *testing.T) { if r.RelPath != "curriculum/math-tutor.md" { t.Errorf("RelPath = %q, want %q", r.RelPath, "curriculum/math-tutor.md") } - if r.FlatName != "math-tutor.md" { - t.Errorf("FlatName = %q, want %q", r.FlatName, "math-tutor.md") + if r.FlatName != "curriculum__math-tutor.md" { + t.Errorf("FlatName = %q, want %q", r.FlatName, "curriculum__math-tutor.md") } if !r.IsNested { t.Error("expected IsNested=true for nested agent") @@ -260,8 +260,8 @@ func TestAgentKind_FlatName(t *testing.T) { want string }{ {"tutor.md", "tutor.md"}, - {"curriculum/math-tutor.md", "math-tutor.md"}, - {"a/b/deep.md", "deep.md"}, + {"curriculum/math-tutor.md", "curriculum__math-tutor.md"}, + {"a/b/deep.md", "a__b__deep.md"}, } for _, tt := range tests { diff --git a/internal/server/handler_trash.go b/internal/server/handler_trash.go index 5dc9e105..ba01cbdf 100644 --- a/internal/server/handler_trash.go +++ b/internal/server/handler_trash.go @@ -1,6 +1,7 @@ package server import ( + "fmt" "net/http" "os" "time" @@ -8,6 +9,14 @@ import ( "skillshare/internal/trash" ) +type trashKind string + +const ( + trashKindAll trashKind = "all" + trashKindSkill trashKind = "skill" + trashKindAgent trashKind = "agent" +) + type trashItemJSON struct { Name string `json:"name"` Kind string `json:"kind,omitempty"` @@ -17,6 +26,12 @@ type trashItemJSON struct { Path string `json:"path"` } +type resolvedTrashEntry struct { + entry *trash.TrashEntry + kind trashKind + dest string +} + // trashBase returns the trash directory for the current mode. func (s *Server) trashBase() string { if s.IsProjectMode() { @@ -33,6 +48,54 @@ func (s *Server) agentTrashBase() string { return trash.AgentTrashDir() } +func parseTrashKind(raw string) (trashKind, error) { + switch raw { + case "", "all": + return trashKindAll, nil + case "skill", "skills": + return trashKindSkill, nil + case "agent", "agents": + return trashKindAgent, nil + default: + return "", fmt.Errorf("invalid trash kind %q", raw) + } +} + +func (s *Server) trashDest(kind trashKind) string { + switch kind { + case trashKindAgent: + return s.agentsSource() + default: + return s.skillsSource() + } +} + +func (s *Server) findTrashEntry(name string, kind trashKind) (*resolvedTrashEntry, error) { + if kind == trashKindSkill || kind == trashKindAll { + base := s.trashBase() + if entry := trash.FindByName(base, name); entry != nil { + return &resolvedTrashEntry{ + entry: entry, + kind: trashKindSkill, + dest: s.trashDest(trashKindSkill), + }, nil + } + } + + if kind == trashKindAgent || kind == trashKindAll { + base := s.agentTrashBase() + if entry := trash.FindByName(base, name); entry != nil { + return &resolvedTrashEntry{ + entry: entry, + kind: trashKindAgent, + dest: s.trashDest(trashKindAgent), + }, nil + } + } + + return nil, nil +} + // handleListTrash returns all trashed items with total size. func (s *Server) handleListTrash(w http.ResponseWriter, r *http.Request) { // Snapshot config under RLock, then release before I/O. @@ -73,22 +136,36 @@ func (s *Server) handleListTrash(w http.ResponseWriter, r *http.Request) { }) } -// handleRestoreTrash restores a trashed skill back to the source directory. +// handleRestoreTrash restores a trashed skill or agent back to its source directory. func (s *Server) handleRestoreTrash(w http.ResponseWriter, r *http.Request) { start := time.Now() s.mu.Lock() defer s.mu.Unlock() name := r.PathValue("name") - base := s.trashBase() + kind, err := parseTrashKind(r.URL.Query().Get("kind")) + if err != nil { + writeError(w, http.StatusBadRequest, err.Error()) + return + } - entry := trash.FindByName(base, name) - if entry == nil { + resolved, err := s.findTrashEntry(name, kind) + if err != nil { + writeError(w, http.StatusInternalServerError, "failed to resolve trashed item: "+err.Error()) + return + } + if resolved == nil { writeError(w, http.StatusNotFound, "trashed item not found: "+name) return } - if err := trash.Restore(entry, s.cfg.Source); err != nil { + switch resolved.kind { + case trashKindAgent: + err = trash.RestoreAgent(resolved.entry, resolved.dest) + default: + err = trash.Restore(resolved.entry, resolved.dest) + } + if err != nil { writeError(w, http.StatusInternalServerError, "failed to restore: "+err.Error()) return } @@ -96,6 +173,7 @@ func (s *Server) handleRestoreTrash(w http.ResponseWriter, r *http.Request) { s.writeOpsLog("trash", "ok", start, map[string]any{ "action": "restore", "name": name, + "kind": string(resolved.kind), "scope": "ui", }, "") @@ -109,15 +187,23 @@ func (s *Server) handleDeleteTrash(w http.ResponseWriter, r *http.Request) { defer s.mu.Unlock() name := r.PathValue("name") - base := s.trashBase() + kind, err := parseTrashKind(r.URL.Query().Get("kind")) + if err != nil { + writeError(w, http.StatusBadRequest, err.Error()) + return + } - entry := trash.FindByName(base, name) - if entry == nil { + resolved, err := s.findTrashEntry(name, kind) + if err != nil { + writeError(w, http.StatusInternalServerError, "failed to resolve trashed item: "+err.Error()) + return + } + if resolved == nil { writeError(w, http.StatusNotFound, "trashed item not found: "+name) return } - if err := os.RemoveAll(entry.Path); err != nil { + if err := os.RemoveAll(resolved.entry.Path); err != nil { writeError(w, http.StatusInternalServerError, "failed to delete: "+err.Error()) return } @@ -125,6 +211,7 @@ func (s *Server) handleDeleteTrash(w http.ResponseWriter, r *http.Request) { s.writeOpsLog("trash", "ok", start, map[string]any{ "action": "delete", "name": name, + "kind": string(resolved.kind), "scope": "ui", }, "") @@ -137,20 +224,39 @@ func (s *Server) handleEmptyTrash(w http.ResponseWriter, r *http.Request) { s.mu.Lock() defer s.mu.Unlock() - base := s.trashBase() - items := trash.List(base) + kind, err := parseTrashKind(r.URL.Query().Get("kind")) + if err != nil { + writeError(w, http.StatusBadRequest, err.Error()) + return + } + + type emptyTarget struct { + base string + } + targets := make([]emptyTarget, 0, 2) + if kind == trashKindAll || kind == trashKindSkill { + targets = append(targets, emptyTarget{base: s.trashBase()}) + } + if kind == trashKindAll || kind == trashKindAgent { + targets = append(targets, emptyTarget{base: s.agentTrashBase()}) + } + removed := 0 - for _, item := range items { - if err := os.RemoveAll(item.Path); err != nil { - writeError(w, http.StatusInternalServerError, "failed to empty trash: "+err.Error()) - return + for _, target := range targets { + items := trash.List(target.base) + for _, item := range items { + if err := os.RemoveAll(item.Path); err != nil { + writeError(w, http.StatusInternalServerError, "failed to empty trash: "+err.Error()) + return + } + removed++ } - removed++ } s.writeOpsLog("trash", "ok", start, map[string]any{ "action": "empty", + "kind": string(kind), "removed": removed, "scope": "ui", }, "") diff --git a/internal/server/handler_trash_test.go b/internal/server/handler_trash_test.go index dd1cc3bb..66727419 100644 --- a/internal/server/handler_trash_test.go +++ b/internal/server/handler_trash_test.go @@ -4,7 +4,11 @@ import ( "encoding/json" "net/http" "net/http/httptest" + "os" + "path/filepath" "testing" + + "skillshare/internal/trash" ) func TestHandleListTrash_Empty(t *testing.T) { @@ -41,6 +45,36 @@ func TestHandleRestoreTrash_NotFound(t *testing.T) { } } +func TestHandleRestoreTrash_AgentKind(t *testing.T) { + s, _ := newTestServer(t) + + agentsDir := s.cfg.EffectiveAgentsSource() + if err := os.MkdirAll(agentsDir, 0755); err != nil { + t.Fatalf("failed to create agents dir: %v", err) + } + agentFile := filepath.Join(agentsDir, "tutor.md") + if err := os.WriteFile(agentFile, []byte("# Tutor agent"), 0644); err != nil { + t.Fatalf("failed to seed agent: %v", err) + } + if _, err := trash.MoveAgentToTrash(agentFile, "", "tutor", s.agentTrashBase()); err != nil { + t.Fatalf("failed to move agent to trash: %v", err) + } + + req := httptest.NewRequest(http.MethodPost, "/api/trash/tutor/restore?kind=agent", nil) + rr := httptest.NewRecorder() + s.handler.ServeHTTP(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("expected 200, got %d: %s", rr.Code, rr.Body.String()) + } + if _, err := os.Stat(filepath.Join(agentsDir, "tutor.md")); err != nil { + t.Fatalf("expected restored agent file, got: %v", err) + } + if entry := trash.FindByName(s.agentTrashBase(), "tutor"); entry != nil { + t.Fatalf("expected agent trash entry to be removed after restore") + } +} + func TestHandleDeleteTrash_NotFound(t *testing.T) { s, _ := newTestServer(t) req := httptest.NewRequest(http.MethodDelete, "/api/trash/nonexistent", nil) @@ -54,6 +88,25 @@ func TestHandleDeleteTrash_NotFound(t *testing.T) { func TestHandleEmptyTrash(t *testing.T) { s, _ := newTestServer(t) + + addSkill(t, s.skillsSource(), "trash-skill") + skillDir := filepath.Join(s.skillsSource(), "trash-skill") + if _, err := trash.MoveToTrash(skillDir, "trash-skill", s.trashBase()); err != nil { + t.Fatalf("failed to trash skill: %v", err) + } + + agentsDir := s.cfg.EffectiveAgentsSource() + if err := os.MkdirAll(agentsDir, 0755); err != nil { + t.Fatalf("failed to create agents dir: %v", err) + } + agentFile := filepath.Join(agentsDir, "tutor.md") + if err := os.WriteFile(agentFile, []byte("# Tutor agent"), 0644); err != nil { + t.Fatalf("failed to seed agent: %v", err) + } + if _, err := trash.MoveAgentToTrash(agentFile, "", "tutor", s.agentTrashBase()); err != nil { + t.Fatalf("failed to trash agent: %v", err) + } + req := httptest.NewRequest(http.MethodPost, "/api/trash/empty", nil) rr := httptest.NewRecorder() s.handler.ServeHTTP(rr, req) @@ -70,7 +123,13 @@ func TestHandleEmptyTrash(t *testing.T) { if !resp.Success { t.Error("expected success true") } - if resp.Removed != 0 { - t.Errorf("expected 0 removed, got %d", resp.Removed) + if resp.Removed != 2 { + t.Errorf("expected 2 removed, got %d", resp.Removed) + } + if len(trash.List(s.trashBase())) != 0 { + t.Errorf("expected skill trash to be empty after empty") + } + if len(trash.List(s.agentTrashBase())) != 0 { + t.Errorf("expected agent trash to be empty after empty") } } diff --git a/internal/server/server.go b/internal/server/server.go index 0d61b742..a3af86be 100644 --- a/internal/server/server.go +++ b/internal/server/server.go @@ -142,6 +142,15 @@ func (s *Server) IsProjectMode() bool { return s.projectRoot != "" } +// skillsSource returns the skills source directory for the current mode. +// Caller must hold s.mu (RLock or Lock) when accessing s.cfg. +func (s *Server) skillsSource() string { + if s.IsProjectMode() { + return filepath.Join(s.projectRoot, ".skillshare", "skills") + } + return s.cfg.Source +} + // agentsSource returns the agents source directory for the current mode. // Caller must hold s.mu (RLock or Lock) when accessing s.cfg. func (s *Server) agentsSource() string { diff --git a/internal/sync/agent_sync_test.go b/internal/sync/agent_sync_test.go index ecc87583..e25eb363 100644 --- a/internal/sync/agent_sync_test.go +++ b/internal/sync/agent_sync_test.go @@ -21,14 +21,14 @@ func TestCheckAgentCollisions_NoCollision(t *testing.T) { func TestCheckAgentCollisions_HasCollision(t *testing.T) { agents := []resource.DiscoveredResource{ - {FlatName: "helper.md", RelPath: "a/helper.md"}, - {FlatName: "helper.md", RelPath: "b/helper.md"}, + {FlatName: "team__helper.md", RelPath: "team/helper.md"}, + {FlatName: "team__helper.md", RelPath: "team__helper.md"}, } collisions := CheckAgentCollisions(agents) if len(collisions) != 1 { t.Fatalf("expected 1 collision, got %d", len(collisions)) } - if collisions[0].FlatName != "helper.md" { + if collisions[0].FlatName != "team__helper.md" { t.Errorf("collision FlatName = %q", collisions[0].FlatName) } } @@ -437,6 +437,73 @@ func TestSyncAgents_DefaultIsMerge(t *testing.T) { } } +func TestSyncAgents_MergeMode_NestedSameBasename_IsStable(t *testing.T) { + sourceDir := t.TempDir() + targetDir := t.TempDir() + + if err := os.MkdirAll(filepath.Join(sourceDir, "team-a"), 0o755); err != nil { + t.Fatalf("mkdir team-a: %v", err) + } + if err := os.MkdirAll(filepath.Join(sourceDir, "team-b"), 0o755); err != nil { + t.Fatalf("mkdir team-b: %v", err) + } + + teamAPath := filepath.Join(sourceDir, "team-a", "helper.md") + teamBPath := filepath.Join(sourceDir, "team-b", "helper.md") + if err := os.WriteFile(teamAPath, []byte("# Team A"), 0o644); err != nil { + t.Fatalf("write team-a helper: %v", err) + } + if err := os.WriteFile(teamBPath, []byte("# Team B"), 0o644); err != nil { + t.Fatalf("write team-b helper: %v", err) + } + + agents, err := resource.AgentKind{}.Discover(sourceDir) + if err != nil { + t.Fatalf("discover agents: %v", err) + } + if len(agents) != 2 { + t.Fatalf("expected 2 agents, got %d", len(agents)) + } + + first, err := SyncAgents(agents, sourceDir, targetDir, "merge", false, false) + if err != nil { + t.Fatalf("first sync: %v", err) + } + if len(first.Linked) != 2 { + t.Fatalf("first sync: expected 2 linked, got %d", len(first.Linked)) + } + if len(first.Updated) != 0 { + t.Fatalf("first sync: expected 0 updated, got %d", len(first.Updated)) + } + + second, err := SyncAgents(agents, sourceDir, targetDir, "merge", false, false) + if err != nil { + t.Fatalf("second sync: %v", err) + } + if len(second.Linked) != 2 { + t.Fatalf("second sync: expected 2 linked, got %d", len(second.Linked)) + } + if len(second.Updated) != 0 { + t.Fatalf("second sync: expected 0 updated, got %d", len(second.Updated)) + } + + linkA, err := os.Readlink(filepath.Join(targetDir, "team-a__helper.md")) + if err != nil { + t.Fatalf("readlink team-a target: %v", err) + } + if linkA != teamAPath { + t.Fatalf("team-a symlink = %q, want %q", linkA, teamAPath) + } + + linkB, err := os.Readlink(filepath.Join(targetDir, "team-b__helper.md")) + if err != nil { + t.Fatalf("readlink team-b target: %v", err) + } + if linkB != teamBPath { + t.Fatalf("team-b symlink = %q, want %q", linkB, teamBPath) + } +} + func TestCollectAgents_DryRun(t *testing.T) { targetDir := t.TempDir() os.WriteFile(filepath.Join(targetDir, "agent.md"), []byte("# Agent"), 0644) diff --git a/internal/trash/trash.go b/internal/trash/trash.go index b9956dd7..ce9a5d47 100644 --- a/internal/trash/trash.go +++ b/internal/trash/trash.go @@ -14,6 +14,8 @@ import ( const defaultMaxAge = 7 * 24 * time.Hour // 7 days +const reservedAgentTrashDir = "agents" + // TrashDir returns the global trash directory path. func TrashDir() string { return filepath.Join(config.DataDir(), "trash") @@ -114,9 +116,13 @@ func MoveToTrash(srcPath, name, trashBase string) (string, error) { // Walks recursively to find nested entries (e.g., "org/_team-skills_"). func List(trashBase string) []TrashEntry { var items []TrashEntry + base := filepath.Clean(trashBase) filepath.WalkDir(trashBase, func(path string, d fs.DirEntry, err error) error { - if err != nil || !d.IsDir() || path == trashBase { + if shouldSkipReservedAgentTrashSubtree(base, path, d) { + return fs.SkipDir + } + if err != nil || !d.IsDir() || path == base { return nil } @@ -157,6 +163,19 @@ func List(trashBase string) []TrashEntry { return items } +// shouldSkipReservedAgentTrashSubtree prevents the skills trash root from +// recursively listing agent trash entries under the reserved "trash/agents" path. +// Agent trash is listed separately from AgentTrashDir()/ProjectAgentTrashDir(). +func shouldSkipReservedAgentTrashSubtree(base, path string, d fs.DirEntry) bool { + if d == nil || !d.IsDir() { + return false + } + if filepath.Base(base) != "trash" { + return false + } + return filepath.Clean(path) == filepath.Join(base, reservedAgentTrashDir) +} + // Cleanup removes trashed items older than maxAge. // Returns the number of items removed. func Cleanup(trashBase string, maxAge time.Duration) (int, error) { @@ -252,7 +271,12 @@ func Restore(entry *TrashEntry, destDir string) error { // Unlike Restore (which moves the whole directory), this copies individual files // from the trashed directory to destDir (since agents are file-based, not directory-based). func RestoreAgent(entry *TrashEntry, destDir string) error { - if err := os.MkdirAll(destDir, 0755); err != nil { + // Reconstruct subdirectory for nested agents (e.g., "demo/my-agent" → destDir/demo/) + targetDir := destDir + if subDir := filepath.Dir(entry.Name); subDir != "." { + targetDir = filepath.Join(destDir, subDir) + } + if err := os.MkdirAll(targetDir, 0755); err != nil { return fmt.Errorf("failed to create agent destination: %w", err) } @@ -267,10 +291,10 @@ func RestoreAgent(entry *TrashEntry, destDir string) error { continue } srcPath := filepath.Join(entry.Path, e.Name()) - destPath := filepath.Join(destDir, e.Name()) + destPath := filepath.Join(targetDir, e.Name()) if _, statErr := os.Stat(destPath); statErr == nil { - return fmt.Errorf("'%s' already exists in %s", e.Name(), destDir) + return fmt.Errorf("'%s' already exists in %s", e.Name(), targetDir) } // Try rename, fallback to copy diff --git a/internal/trash/trash_test.go b/internal/trash/trash_test.go index f6512434..1da45f1b 100644 --- a/internal/trash/trash_test.go +++ b/internal/trash/trash_test.go @@ -260,6 +260,22 @@ func TestList_NestedEntries(t *testing.T) { } } +func TestList_SkipsReservedAgentTrashSubtree(t *testing.T) { + tmpDir := t.TempDir() + trashBase := filepath.Join(tmpDir, "trash") + + os.MkdirAll(filepath.Join(trashBase, "skill-a_2026-01-01_10-00-00"), 0755) + os.MkdirAll(filepath.Join(trashBase, "agents", "demo", "code-archaeologist_2026-01-02_10-00-00"), 0755) + + items := List(trashBase) + if len(items) != 1 { + t.Fatalf("expected only skill trash items, got %d", len(items)) + } + if items[0].Name != "skill-a" { + t.Fatalf("expected only skill-a, got %q", items[0].Name) + } +} + func TestFindByName_NestedName(t *testing.T) { tmpDir := t.TempDir() trashBase := filepath.Join(tmpDir, "trash") @@ -275,6 +291,17 @@ func TestFindByName_NestedName(t *testing.T) { } } +func TestFindByName_SkipsReservedAgentTrashSubtree(t *testing.T) { + tmpDir := t.TempDir() + trashBase := filepath.Join(tmpDir, "trash") + + os.MkdirAll(filepath.Join(trashBase, "agents", "demo", "code-archaeologist_2026-01-01_10-00-00"), 0755) + + if entry := FindByName(trashBase, "agents/demo/code-archaeologist"); entry != nil { + t.Fatalf("expected reserved agent subtree to be ignored, got %q", entry.Name) + } +} + func TestRestore_NestedName(t *testing.T) { tmpDir := t.TempDir() trashBase := filepath.Join(tmpDir, "trash") diff --git a/tests/integration/agent_crud_test.go b/tests/integration/agent_crud_test.go index 03dd1183..4bc86ff7 100644 --- a/tests/integration/agent_crud_test.go +++ b/tests/integration/agent_crud_test.go @@ -244,6 +244,31 @@ func TestTrash_Agents_Restore(t *testing.T) { } } +func TestTrash_Agents_Restore_Nested_DoesNotGoToSkills(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + agentsDir := createAgentSource(t, sb, map[string]string{ + "demo/code-archaeologist.md": "# Code Archaeologist", + }) + sb.WriteConfig(`source: ` + sb.SourcePath + "\ntargets: {}\n") + + sb.RunCLI("uninstall", "-g", "agents", "demo/code-archaeologist", "--force") + + result := sb.RunCLI("trash", "agents", "restore", "demo/code-archaeologist") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "Restored") + + if _, err := os.Stat(filepath.Join(agentsDir, "demo", "code-archaeologist.md")); err != nil { + t.Fatalf("nested agent should be restored to agents source: %v", err) + } + + wrongSkillsPath := filepath.Join(sb.SourcePath, "agents", "demo", "code-archaeologist", "code-archaeologist.md") + if _, err := os.Stat(wrongSkillsPath); err == nil { + t.Fatalf("nested agent should not be restored into skills tree: %s", wrongSkillsPath) + } +} + // --- default behavior unchanged --- func TestTrash_Default_SkillsOnly(t *testing.T) { @@ -257,3 +282,23 @@ func TestTrash_Default_SkillsOnly(t *testing.T) { result.AssertSuccess(t) result.AssertAnyOutputContains(t, "empty") } + +func TestTrash_Default_SkillsOnly_IgnoresAgentTrash(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + createAgentSource(t, sb, map[string]string{ + "demo/tutor.md": "# Tutor agent", + }) + sb.WriteConfig(`source: ` + sb.SourcePath + "\ntargets: {}\n") + + sb.RunCLI("uninstall", "-g", "agents", "demo/tutor", "--force") + + result := sb.RunCLI("trash", "list", "--no-tui") + result.AssertSuccess(t) + result.AssertAnyOutputContains(t, "empty") + + restore := sb.RunCLI("trash", "restore", "demo/tutor") + restore.AssertFailure(t) + restore.AssertAnyOutputContains(t, "not found in trash") +} diff --git a/tests/integration/agent_list_sync_test.go b/tests/integration/agent_list_sync_test.go index d5da5a46..5c1e103c 100644 --- a/tests/integration/agent_list_sync_test.go +++ b/tests/integration/agent_list_sync_test.go @@ -19,7 +19,11 @@ func createAgentSource(t *testing.T, sb *testutil.Sandbox, agents map[string]str t.Fatalf("failed to create agents dir: %v", err) } for name, content := range agents { - if err := os.WriteFile(filepath.Join(agentsDir, name), []byte(content), 0644); err != nil { + agentPath := filepath.Join(agentsDir, name) + if err := os.MkdirAll(filepath.Dir(agentPath), 0755); err != nil { + t.Fatalf("failed to create agent parent dir for %s: %v", name, err) + } + if err := os.WriteFile(agentPath, []byte(content), 0644); err != nil { t.Fatalf("failed to write agent %s: %v", name, err) } } diff --git a/tests/integration/agent_project_mode_test.go b/tests/integration/agent_project_mode_test.go index 59ea1679..8f8965ad 100644 --- a/tests/integration/agent_project_mode_test.go +++ b/tests/integration/agent_project_mode_test.go @@ -191,6 +191,79 @@ func TestAuditProject_Agents(t *testing.T) { result.AssertOutputNotContains(t, "not yet supported") } +func TestSyncProject_All_NestedAgentsSameBasename_FlattensAndStaysStable(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + projectDir := filepath.Join(sb.Root, "nested-agents-project") + skillsDir := filepath.Join(projectDir, ".skillshare", "skills") + agentsDir := filepath.Join(projectDir, ".skillshare", "agents") + claudeAgents := filepath.Join(projectDir, ".claude", "agents") + cursorAgents := filepath.Join(projectDir, ".cursor", "agents") + claudeSkills := filepath.Join(projectDir, ".claude", "skills") + cursorSkills := filepath.Join(projectDir, ".cursor", "skills") + + for _, dir := range []string{ + filepath.Join(skillsDir, "sample-skill"), + filepath.Join(agentsDir, "team-a"), + filepath.Join(agentsDir, "team-b"), + claudeAgents, + cursorAgents, + claudeSkills, + cursorSkills, + } { + if err := os.MkdirAll(dir, 0o755); err != nil { + t.Fatalf("mkdir %s: %v", dir, err) + } + } + + if err := os.WriteFile(filepath.Join(skillsDir, "sample-skill", "SKILL.md"), []byte("---\nname: sample-skill\n---\n# Sample"), 0o644); err != nil { + t.Fatalf("write sample skill: %v", err) + } + if err := os.WriteFile(filepath.Join(agentsDir, "team-a", "helper.md"), []byte("# Team A"), 0o644); err != nil { + t.Fatalf("write team-a helper: %v", err) + } + if err := os.WriteFile(filepath.Join(agentsDir, "team-b", "helper.md"), []byte("# Team B"), 0o644); err != nil { + t.Fatalf("write team-b helper: %v", err) + } + + configContent := `targets: + - name: claude + skills: + path: ` + claudeSkills + ` + agents: + path: ` + claudeAgents + ` + - name: cursor + skills: + path: ` + cursorSkills + ` + agents: + path: ` + cursorAgents + ` +` + if err := os.WriteFile(filepath.Join(projectDir, ".skillshare", "config.yaml"), []byte(configContent), 0o644); err != nil { + t.Fatalf("write project config: %v", err) + } + + sb.WriteConfig(`source: ` + sb.SourcePath + "\ntargets: {}\n") + + first := sb.RunCLIInDir(projectDir, "sync", "-p", "all") + first.AssertSuccess(t) + first.AssertAnyOutputContains(t, "Agent sync complete") + first.AssertAnyOutputContains(t, "0 updated") + + second := sb.RunCLIInDir(projectDir, "sync", "-p", "all") + second.AssertSuccess(t) + second.AssertAnyOutputContains(t, "Agent sync complete") + second.AssertAnyOutputContains(t, "0 updated") + + for _, base := range []string{claudeAgents, cursorAgents} { + for _, name := range []string{"team-a__helper.md", "team-b__helper.md"} { + if _, err := os.Lstat(filepath.Join(base, name)); err != nil { + t.Fatalf("expected synced agent %s in %s: %v", name, base, err) + } + } + } +} + // --- default -p (skills only, unchanged) --- func TestStatusProject_Default_SkillsOnly(t *testing.T) { diff --git a/ui/src/api/client.ts b/ui/src/api/client.ts index b807a163..97d340de 100644 --- a/ui/src/api/client.ts +++ b/ui/src/api/client.ts @@ -346,12 +346,21 @@ export const api = { // Trash listTrash: () => apiFetch('/trash'), - restoreTrash: (name: string) => - apiFetch<{ success: boolean }>(`/trash/${encodeURIComponent(name)}/restore`, { method: 'POST' }), - deleteTrash: (name: string) => - apiFetch<{ success: boolean }>(`/trash/${encodeURIComponent(name)}`, { method: 'DELETE' }), - emptyTrash: () => - apiFetch<{ success: boolean; removed: number }>('/trash/empty', { method: 'POST' }), + restoreTrash: (name: string, kind?: 'skill' | 'agent') => + apiFetch<{ success: boolean }>( + `/trash/${encodeURIComponent(name)}/restore${kind ? `?kind=${encodeURIComponent(kind)}` : ''}`, + { method: 'POST' }, + ), + deleteTrash: (name: string, kind?: 'skill' | 'agent') => + apiFetch<{ success: boolean }>( + `/trash/${encodeURIComponent(name)}${kind ? `?kind=${encodeURIComponent(kind)}` : ''}`, + { method: 'DELETE' }, + ), + emptyTrash: (kind: 'skill' | 'agent' | 'all' = 'all') => + apiFetch<{ success: boolean; removed: number }>( + `/trash/empty${kind ? `?kind=${encodeURIComponent(kind)}` : ''}`, + { method: 'POST' }, + ), // Extras listExtras: () => apiFetch<{ extras: Extra[] }>('/extras'), diff --git a/ui/src/pages/TrashPage.tsx b/ui/src/pages/TrashPage.tsx index 416d3aec..f98e6282 100644 --- a/ui/src/pages/TrashPage.tsx +++ b/ui/src/pages/TrashPage.tsx @@ -47,9 +47,9 @@ export default function TrashPage() { staleTime: staleTimes.trash, }); - const [restoreName, setRestoreName] = useState(null); + const [restoreItem, setRestoreItem] = useState(null); const [restoring, setRestoring] = useState(false); - const [deleteName, setDeleteName] = useState(null); + const [deleteItem, setDeleteItem] = useState(null); const [deleting, setDeleting] = useState(false); const [emptyOpen, setEmptyOpen] = useState(false); const [emptying, setEmptying] = useState(false); @@ -62,41 +62,41 @@ export default function TrashPage() { }; const handleRestore = async () => { - if (!restoreName) return; + if (!restoreItem) return; setRestoring(true); try { - await api.restoreTrash(restoreName); - toast(`Restored "${restoreName}" from trash`, 'success'); + await api.restoreTrash(restoreItem.name, restoreItem.kind ?? 'skill'); + toast(`Restored "${restoreItem.name}" from trash`, 'success'); queryClient.invalidateQueries({ queryKey: queryKeys.trash }); queryClient.invalidateQueries({ queryKey: queryKeys.skills.all }); } catch (e: any) { toast(e.message, 'error'); } finally { setRestoring(false); - setRestoreName(null); + setRestoreItem(null); } }; const handleDelete = async () => { - if (!deleteName) return; + if (!deleteItem) return; setDeleting(true); try { - await api.deleteTrash(deleteName); - toast(`Permanently deleted "${deleteName}"`, 'success'); + await api.deleteTrash(deleteItem.name, deleteItem.kind ?? 'skill'); + toast(`Permanently deleted "${deleteItem.name}"`, 'success'); queryClient.invalidateQueries({ queryKey: queryKeys.trash }); queryClient.invalidateQueries({ queryKey: queryKeys.skills.all }); } catch (e: any) { toast(e.message, 'error'); } finally { setDeleting(false); - setDeleteName(null); + setDeleteItem(null); } }; const handleEmpty = async () => { setEmptying(true); try { - const res = await api.emptyTrash(); + const res = await api.emptyTrash('all'); toast(`Emptied trash (${res.removed} item${res.removed !== 1 ? 's' : ''} removed)`, 'success'); queryClient.invalidateQueries({ queryKey: queryKeys.trash }); queryClient.invalidateQueries({ queryKey: queryKeys.skills.all }); @@ -124,8 +124,8 @@ export default function TrashPage() { icon={} title="Trash" subtitle={isProjectMode - ? 'Recently deleted project skills are kept for 7 days before automatic cleanup' - : 'Recently deleted skills are kept for 7 days before automatic cleanup'} + ? 'Recently deleted project skills and agents are kept for 7 days before automatic cleanup' + : 'Recently deleted skills and agents are kept for 7 days before automatic cleanup'} actions={ <> - {(skill.isInRepo || skill.source) && ( + {(resource.isInRepo || resource.source) && (

- {skill.kind !== 'agent' && + {resource.kind !== 'agent' &&

@@ -557,7 +558,7 @@ export default function SkillDetailPage() { {linkedSkill ? ( {/* Target Distribution */} - + {/* Target Sync Status */} - +

@@ -606,9 +607,9 @@ export default function SkillDetailPage() { {viewingFile && ( setViewingFile(null)} /> @@ -637,11 +638,11 @@ export default function SkillDetailPage() { {/* Confirm uninstall dialog */} (queryKeys.skills.all, { ...previous, - skills: patchFn(previous.skills), + resources: patchFn(previous.resources), }); } return previous; @@ -181,7 +181,7 @@ function useSkillActions() { key: 'detail', label: 'View Detail', icon: , - onSelect: () => navigate(`/skills/${encodeURIComponent(skill.flatName)}`), + onSelect: () => navigate(`/resources/${encodeURIComponent(skill.flatName)}`), }, { key: 'toggle', @@ -585,7 +585,7 @@ const SkillPostit = memo(function SkillPostit({ return ( @@ -751,7 +751,7 @@ export default function SkillsPage() { } | null>(null); const [gridConfirmUninstallRepo, setGridConfirmUninstallRepo] = useState(null); - const skills = data?.skills ?? []; + const skills = data?.resources ?? []; // Compute counts for each filter type — scoped to the active tab const filterCounts = useMemo(() => { @@ -810,7 +810,7 @@ export default function SkillsPage() { subtitle="" className="mb-4!" actions={ - + } /> - {/* Add target form */} - {adding && ( - -

{ + setAdding(false); + setNewTarget({ name: '', path: '', agentPath: '' }); + setSearchQuery(''); + setCustomMode(false); + }} + maxWidth="xl" + padding="none" + > +
+

Add New Target

+

+ + +
- {/* Selected target preview + path + actions */} - {newTarget.name && !customMode ? ( -
-
- -
-

- {newTarget.name} -

-

+ {/* Selected target preview + path + actions */} + {newTarget.name && !customMode ? ( +

+
+ +
+

+ {newTarget.name} +

+ {newTarget.agentPath ? ( + <> +

+ Skills + {shortenHome(newTarget.path)} +

+

+ Agents + {shortenHome(newTarget.agentPath)} +

+ + ) : ( +

{shortenHome(newTarget.path)}

-
- + )}
+
+ setNewTarget({ ...newTarget, path: e.target.value })} + placeholder="/path/to/target/skills" + /> + {newTarget.agentPath && ( setNewTarget({ ...newTarget, path: e.target.value })} - placeholder="/path/to/target" + value={newTarget.agentPath} + onChange={(e) => setNewTarget({ ...newTarget, agentPath: e.target.value })} + placeholder="/path/to/target/agents" /> + )} -
- -
+
+ +
- ) : customMode ? ( - /* Custom target entry mode */ -
- setNewTarget({ ...newTarget, name: e.target.value })} - placeholder="my-custom-target" +
+ ) : customMode ? ( + /* Custom target entry mode */ +
+ setNewTarget({ ...newTarget, name: e.target.value })} + placeholder="my-custom-target" + autoFocus + /> + setNewTarget({ ...newTarget, path: e.target.value })} + placeholder="/path/to/target/skills" + /> + setNewTarget({ ...newTarget, agentPath: e.target.value })} + placeholder="/path/to/target/agents" + /> +
+ + +
+
+ ) : ( + /* Target picker mode */ +
+ {/* Search bar */} +
+ - setNewTarget({ ...newTarget, path: e.target.value })} - placeholder="/path/to/target/skills" + value={searchQuery} + onChange={(e) => setSearchQuery(e.target.value)} + placeholder="Search targets..." + className="w-full pl-10 pr-4 py-2.5 bg-surface border-2 border-muted text-pencil placeholder:text-muted-dark focus:outline-none focus:border-pencil transition-all" + style={{ + borderRadius: radius.sm, + fontSize: '1rem', + }} + autoFocus /> -
- - -
- ) : ( - /* Target picker mode */ -
- {/* Search bar */} -
- - setSearchQuery(e.target.value)} - placeholder="Search targets..." - className="w-full pl-10 pr-4 py-2.5 bg-surface border-2 border-muted text-pencil placeholder:text-muted-dark focus:outline-none focus:border-pencil transition-all" - style={{ - borderRadius: radius.sm, - fontSize: '1rem', - }} - autoFocus - /> -
- {/* Scrollable target list */} -
- {/* Detected section */} - {detected.length > 0 && ( -
-
-
- - - Detected on your system - -
- {detected.map((t) => ( - { - setNewTarget({ name: target.name, path: target.path }); - setSearchQuery(''); - }} - /> - ))} + {/* Scrollable target list */} +
+ {/* Detected section */} + {detected.length > 0 && ( +
+
+
+ + + Detected on your system +
- )} + {detected.map((t) => ( + { + setNewTarget({ name: target.name, path: target.path, agentPath: target.agentPath || '' }); + setSearchQuery(''); + }} + /> + ))} +
+ )} - {/* All available section */} - {others.length > 0 && ( -
-
- - All available targets - -
- {others.map((t) => ( - { - setNewTarget({ name: target.name, path: target.path }); - setSearchQuery(''); - }} - /> - ))} + {/* All available section */} + {others.length > 0 && ( +
+
+ + All available targets +
- )} + {others.map((t) => ( + { + setNewTarget({ name: target.name, path: target.path, agentPath: target.agentPath || '' }); + setSearchQuery(''); + }} + /> + ))} +
+ )} - {/* No results */} - {detected.length === 0 && others.length === 0 && ( -
- {searchQuery ? `No targets matching "${searchQuery}"` : 'No available targets'} -
- )} -
+ {/* No results */} + {detected.length === 0 && others.length === 0 && ( +
+ {searchQuery ? `No targets matching "${searchQuery}"` : 'No available targets'} +
+ )} +
- {/* Custom target link */} -
- -
+ {/* Custom target link */} +
+
- )} - - )} +
+ )} + {/* Targets list */} {targets.length > 0 ? ( @@ -365,28 +432,22 @@ export default function TargetsPage() { const expectedCount = target.expectedSkillCount || sourceSkillCount; const isMergeOrCopy = target.mode === 'merge' && target.status === 'merged' || target.mode === 'copy' && target.status === 'copied'; const hasDrift = isMergeOrCopy && target.linkedCount < expectedCount; + const agentSummary = targetAgentSummary(target); + const agentFilters = (target.agentInclude?.length ?? 0) + (target.agentExclude?.length ?? 0); + const visibleAgentInclude = (target.agentInclude ?? []).slice(0, 3); + const visibleAgentExclude = (target.agentExclude ?? []).slice(0, Math.max(0, 3 - visibleAgentInclude.length)); + const overflowAgentFilters = agentFilters - (visibleAgentInclude.length + visibleAgentExclude.length); return ( - {/* Top row: name + path + action icons */} -
-
-
- - {target.name} - -
-

- {shortenHome(target.path)} -

- {target.agentPath && ( -

- agent: {shortenHome(target.agentPath)} -

- )} + {/* Top row: name + action icons */} +
+
+ + {target.name}
{(target.mode === 'merge' || target.mode === 'copy') && target.localCount > 0 && ( @@ -408,83 +469,177 @@ export default function TargetsPage() { />
- {/* Full-width separator + sync controls */} -
- updateTargetSetting(target.name, { target_naming: naming }, `Target naming changed to ${naming}`)} - options={TARGET_NAMING_OPTIONS} + value={target.mode || 'merge'} + onChange={(mode) => updateTargetSetting(target.name, { mode }, `Sync mode changed to ${mode}`)} + options={SYNC_MODE_OPTIONS} size="sm" - className="w-48" + className="w-44" /> - )} + {target.mode !== 'symlink' && ( + updateTargetSetting(target.name, { agent_mode: mode }, `Agent mode changed to ${mode}`)} + options={AGENT_MODE_OPTIONS} + size="sm" + className="w-44" + /> + + {agentSummary.hasDrift ? ( + + + {agentSummary.text} + + ) : ( + agentSummary.text + )} + +
+
+ + {(() => { + const expected = target.agentExpectedCount ?? 0; + if (expected === 0) return 'No agents'; + if (!agentFilters) return `All ${expected} agents`; + const linked = target.agentLinkedCount ?? 0; + return `${linked}/${expected} agents`; + })()} + + {visibleAgentInclude.map((pattern, idx) => ( + + + {pattern} + + ))} + {visibleAgentExclude.map((pattern, idx) => ( + + − {pattern} + + ))} + {overflowAgentFilters > 0 && ( + +{overflowAgentFilters} more + )} + + {agentFilters ? 'Edit in Filter Studio →' : 'Customize filters →'} + +
)} {(target.skippedSkillCount ?? 0) > 0 && ( @@ -566,11 +721,22 @@ function TargetPickerItem({ {target.name} -

- {shortenHome(target.path)} -

+ {target.agentPath ? ( +
+

+ Skills + {shortenHome(target.path)} +

+

+ Agents + {shortenHome(target.agentPath)} +

+
+ ) : ( +

+ {shortenHome(target.path)} +

+ )}
{isDetected && ( Date: Thu, 9 Apr 2026 03:14:46 +0800 Subject: [PATCH 124/205] docs: update target/filtering docs for agent filters and metadata.targets - Add --agent-mode, --add-agent-include/exclude flags to target command docs - Update filtering reference with agent filter examples and storage paths - Prefer metadata.targets over top-level targets in all examples - Note agent filter limitations (merge/copy only, requires agents path) - Update skill-format docs with metadata.targets precedence note - Update built-in skill references/targets.md with agent filter examples --- ...t_command_options_skills_agents_runbook.md | 253 ++++++++++++++++++ skills/skillshare/references/targets.md | 16 +- .../how-to/daily-tasks/filtering-skills.md | 5 +- website/docs/reference/commands/target.md | 28 +- website/docs/reference/filtering.md | 28 +- .../docs/reference/targets/configuration.md | 14 +- website/docs/understand/skill-format.md | 2 + 7 files changed, 323 insertions(+), 23 deletions(-) create mode 100644 ai_docs/tests/target_command_options_skills_agents_runbook.md diff --git a/ai_docs/tests/target_command_options_skills_agents_runbook.md b/ai_docs/tests/target_command_options_skills_agents_runbook.md new file mode 100644 index 00000000..13627192 --- /dev/null +++ b/ai_docs/tests/target_command_options_skills_agents_runbook.md @@ -0,0 +1,253 @@ +# CLI E2E Runbook: Target Command Options (Skills + Agents) + +Validates `skillshare target` command options across plain list, JSON list, +target info/settings, and both global/project mode for skill and agent +configuration. + +**Origin**: v0.19.x — target settings gained explicit agents options alongside +existing skills options, and docs needed regression coverage against code. + +## Scope + +- `target help` advertises skill and agent settings flags +- `target list --no-tui` and `target list --json` expose agent metadata for + supported targets while leaving unsupported targets agent-free +- `target ` supports `--mode`, `--target-naming`, include/exclude, and + the agent counterparts `--agent-mode`, `--add/remove-agent-include/exclude` +- Agent filter flags are rejected for unsupported targets and for + `agent-mode=symlink` +- Project mode mirrors the same skill/agent target settings behavior + +## Environment + +Run inside devcontainer via mdproof. +Use `-g`/`-p` explicitly to avoid auto-mode ambiguity. + +## Steps + +### 1. Help output lists skill and agent target settings + +```bash +ss target help +``` + +Expected: +- exit_code: 0 +- --mode +- --agent-mode +- --target-naming +- --add-include +- --add-exclude +- --remove-include +- --remove-exclude +- --add-agent-include +- --add-agent-exclude +- --remove-agent-include +- --remove-agent-exclude + +### 2. Global plain target list shows skills and agents sections + +```bash +set -e +BASE=~/.config/skillshare +mkdir -p "$BASE/skills/team-alpha" "$BASE/agents" "$HOME/custom-tool/skills" + +printf '%s\n' \ + '---' \ + 'name: team-alpha' \ + 'description: Team Alpha skill' \ + 'metadata:' \ + ' targets: [claude]' \ + '---' \ + '# Team Alpha' \ + > "$BASE/skills/team-alpha/SKILL.md" + +printf '%s\n' \ + '---' \ + 'name: reviewer' \ + 'description: Review agent' \ + '---' \ + '# Reviewer' \ + > "$BASE/agents/reviewer.md" + +ss target add custom-tool "$HOME/custom-tool/skills" -g +ss target list --no-tui -g +``` + +Expected: +- exit_code: 0 +- claude +- custom-tool +- Skills: +- Agents: +- /.claude/agents + +### 3. Global JSON target list includes agent metadata only for supported targets + +```bash +ss target list --json -g +``` + +Expected: +- exit_code: 0 +- jq: .targets | length >= 2 +- jq: (.targets[] | select(.name == "claude").agentPath | type) == "string" +- jq: (.targets[] | select(.name == "claude").agentMode) == "merge" +- jq: (.targets[] | select(.name == "claude").agentExpectedCount) >= 0 +- jq: (.targets[] | select(.name == "custom-tool").agentPath) == null + +### 4. Global skill settings flags update target mode, naming, and filters + +```bash +set -e +ss target claude --mode copy -g +ss target claude --target-naming standard -g +ss target claude --add-include "team-*" -g +ss target claude --add-exclude "_legacy*" -g +ss target claude -g +``` + +Expected: +- exit_code: 0 +- Changed claude mode: merge -> copy +- Changed claude target naming: flat -> standard +- added include: team-* +- added exclude: _legacy* +- Mode: copy +- Naming: standard +- Include: team-* +- Exclude: _legacy* + +### 5. Global agent settings flags update agent mode and agent filters + +```bash +set -e +ss target claude --agent-mode copy -g +ss target claude --add-agent-include "team-*" -g +ss target claude --add-agent-exclude "draft-*" -g +ss target claude -g +``` + +Expected: +- exit_code: 0 +- Changed claude agent mode: merge -> copy +- added agent include: team-* +- added agent exclude: draft-* +- Agents: +- Mode: copy +- Include: team-* +- Exclude: draft-* + +### 6. Agent filter guard rails reject symlink mode and unsupported targets + +```bash +set -e +ss target claude --agent-mode symlink -g +ss target claude -g + +set +e +SYMLINK_ERR=$(ss target claude --add-agent-include "retry-*" -g 2>&1) +SYMLINK_STATUS=$? +CUSTOM_ERR=$(ss target custom-tool --add-agent-include "retry-*" -g 2>&1) +CUSTOM_STATUS=$? +set -e + +printf 'REJECTED_SYMLINK=%d\n' "$SYMLINK_STATUS" +printf '%s\n' "$SYMLINK_ERR" +printf 'REJECTED_CUSTOM=%d\n' "$CUSTOM_STATUS" +printf '%s\n' "$CUSTOM_ERR" +``` + +Expected: +- exit_code: 0 +- Changed claude agent mode: copy -> symlink +- Filters: ignored in symlink mode +- REJECTED_SYMLINK=1 +- ignored in symlink mode +- REJECTED_CUSTOM=1 +- target 'custom-tool' does not have an agents path + +### 7. Global remove flags clear both skill and agent filters + +```bash +set -e +ss target claude --agent-mode copy -g +ss target claude --remove-include "team-*" -g +ss target claude --remove-exclude "_legacy*" -g +ss target claude --remove-agent-include "team-*" -g +ss target claude --remove-agent-exclude "draft-*" -g +ss target claude -g +``` + +Expected: +- exit_code: 0 +- Changed claude agent mode: symlink -> copy +- removed include: team-* +- removed exclude: _legacy* +- removed agent include: team-* +- removed agent exclude: draft-* +- Include: (none) +- Exclude: (none) + +### 8. Project mode mirrors skill and agent target settings + +```bash +set -e +PROJECT=/tmp/target-options-project +rm -rf "$PROJECT" +mkdir -p "$PROJECT/.skillshare/skills" "$PROJECT/.skillshare/agents" + +cat > "$PROJECT/.skillshare/config.yaml" <<'EOF' +targets: + - claude +EOF + +cd "$PROJECT" +ss target claude --mode copy -p +ss target claude --target-naming standard -p +ss target claude --add-include "proj-*" -p +ss target claude --add-exclude "draft-*" -p +ss target claude --agent-mode copy -p +ss target claude --add-agent-include "proj-*" -p +ss target claude --add-agent-exclude "draft-*" -p +ss target claude -p +``` + +Expected: +- exit_code: 0 +- Changed claude mode: merge -> copy +- Changed claude target naming: flat -> standard +- added include: proj-* +- added exclude: draft-* +- Changed claude agent mode: merge -> copy +- added agent include: proj-* +- added agent exclude: draft-* +- Mode: copy +- Naming: standard +- Include: proj-* +- Exclude: draft-* +- Agents: + +### 9. Project JSON target list includes agent metadata + +```bash +cd /tmp/target-options-project +ss target list --json -p +``` + +Expected: +- exit_code: 0 +- jq: .targets | length == 1 +- jq: (.targets[0].name) == "claude" +- jq: (.targets[0].agentMode) == "copy" +- jq: (.targets[0].agentInclude) == ["proj-*"] +- jq: (.targets[0].agentExclude) == ["draft-*"] + +## Pass Criteria + +- The `target` command exposes both skill and agent settings in help and list + output +- Global and project target settings accept and persist skill/agent mode and + include/exclude changes +- Agent-only guard rails behave correctly for unsupported targets and + symlink-mode agent targets diff --git a/skills/skillshare/references/targets.md b/skills/skillshare/references/targets.md index fb2b5ad5..ec2609ce 100644 --- a/skills/skillshare/references/targets.md +++ b/skills/skillshare/references/targets.md @@ -35,17 +35,22 @@ targets: ## Target Filters -Control which skills sync to each target using include/exclude glob patterns. +Control which skills and agents sync to each target using include/exclude glob patterns. ```bash -# Add filters +# Add skill filters skillshare target claude --add-include "team-*" # Only sync matching skills skillshare target claude --add-exclude "_legacy*" # Skip matching skills skillshare target claude --add-include "team-*" -p # Project target filter +# Add agent filters +skillshare target claude --add-agent-include "team-*" +skillshare target claude --add-agent-exclude "draft-*" + # Remove filters skillshare target claude --remove-include "team-*" skillshare target claude --remove-exclude "_legacy*" +skillshare target claude --remove-agent-include "team-*" ``` **Config format** with filters: @@ -60,16 +65,17 @@ targets: **Pattern syntax:** `filepath.Match` globs — `*` matches any non-separator chars, `?` matches single char. -**Precedence:** Include filters apply first (whitelist), then exclude filters remove from that set. No filters = all skills. +**Precedence:** Include filters apply first (whitelist), then exclude filters remove from that set. No filters = all matching resources. Agent filters require a target with an agents path, and they are ignored in `symlink` mode. ## Skill-Level Targets -Skills can declare which targets they should sync to via a `targets` frontmatter field in SKILL.md: +Skills can declare which targets they should sync to via `metadata.targets` in SKILL.md. Top-level `targets` is still supported for older skills, but `metadata.targets` wins when both are present: ```yaml --- name: enterprise-skill -targets: [claude, cursor] +metadata: + targets: [claude, cursor] --- ``` diff --git a/website/docs/how-to/daily-tasks/filtering-skills.md b/website/docs/how-to/daily-tasks/filtering-skills.md index 7b6a8798..2134a358 100644 --- a/website/docs/how-to/daily-tasks/filtering-skills.md +++ b/website/docs/how-to/daily-tasks/filtering-skills.md @@ -9,13 +9,14 @@ Pick the scenario that matches your goal. ## Sync a skill to specific targets only -Add a `targets` field (or `metadata.targets`) to the skill's SKILL.md frontmatter. +Add `metadata.targets` (preferred) to the skill's SKILL.md frontmatter. The skill will only sync to the listed targets. ```yaml --- name: my-cursor-only-skill -targets: [cursor] +metadata: + targets: [cursor] --- ``` diff --git a/website/docs/reference/commands/target.md b/website/docs/reference/commands/target.md index 6488f48f..d7fdc51a 100644 --- a/website/docs/reference/commands/target.md +++ b/website/docs/reference/commands/target.md @@ -180,19 +180,28 @@ skillshare sync ## Target Filters (include/exclude) -Manage per-target include/exclude filters from the CLI: +Manage per-target include/exclude filters for both skills and agents from the CLI: ```bash +# Skills skillshare target claude --add-include "team-*" skillshare target claude --add-exclude "_legacy*" skillshare target claude --remove-include "team-*" skillshare target claude --remove-exclude "_legacy*" + +# Agents +skillshare target claude --add-agent-include "team-*" +skillshare target claude --add-agent-exclude "draft-*" +skillshare target claude --remove-agent-include "team-*" +skillshare target claude --remove-agent-exclude "draft-*" ``` After changing filters, run `skillshare sync` to apply. Filters work in **merge and copy modes**. Patterns use Go `filepath.Match` syntax (`*`, `?`, `[...]`). In symlink mode, filters are ignored. +Agent filters are only available for targets that have an agents path, either from a built-in target definition or an explicit `agents.path` override in config. + See [Configuration](/docs/reference/targets/configuration#include--exclude-target-filters) for pattern cheat sheet and scenarios. :::tip @@ -224,11 +233,16 @@ No additional options. | Flag | Description | |------|-------------| | `--mode, -m ` | Set sync mode (merge, copy, or symlink) | +| `--agent-mode ` | Set agents sync mode (merge, copy, or symlink) | | `--target-naming ` | Set target naming (flat or standard) | | `--add-include ` | Add an include filter pattern | | `--add-exclude ` | Add an exclude filter pattern | | `--remove-include ` | Remove an include filter pattern | | `--remove-exclude ` | Remove an exclude filter pattern | +| `--add-agent-include ` | Add an agent include filter pattern | +| `--add-agent-exclude ` | Add an agent exclude filter pattern | +| `--remove-agent-include ` | Remove an agent include filter pattern | +| `--remove-agent-exclude ` | Remove an agent exclude filter pattern | ## Supported AI CLIs @@ -262,12 +276,21 @@ skillshare sync skillshare target claude --mode symlink skillshare sync -# Add/remove filters +# Configure agent sync mode +skillshare target claude --agent-mode copy +skillshare sync + +# Add/remove skill filters skillshare target claude --add-include "team-*" skillshare target claude --add-exclude "_legacy*" skillshare target claude --remove-include "team-*" skillshare sync +# Add/remove agent filters +skillshare target claude --add-agent-include "team-*" +skillshare target claude --add-agent-exclude "draft-*" +skillshare sync + # Remove target (restores skills) skillshare target remove cursor ``` @@ -283,6 +306,7 @@ skillshare target remove cursor -p # Remove targe skillshare target list -p # List project targets skillshare target claude -p # Show target info skillshare target claude --add-include "team-*" -p # Add filter +skillshare target claude --add-agent-include "team-*" -p # Add agent filter ``` ### How It Differs diff --git a/website/docs/reference/filtering.md b/website/docs/reference/filtering.md index cffc4de2..420554e2 100644 --- a/website/docs/reference/filtering.md +++ b/website/docs/reference/filtering.md @@ -15,8 +15,8 @@ See [Filtering Skills](/docs/how-to/daily-tasks/filtering-skills) for a scenario | Layer | Scope | Where to set | Syntax | Evaluated at | |-------|-------|-------------|--------|-------------| | `.skillignore` | Hides from all targets | Source dir or tracked repo root | [gitignore](https://git-scm.com/docs/gitignore) | Discovery | -| SKILL.md `targets` | Restricts to listed targets | Per skill frontmatter | YAML list | Sync (parsed at discovery) | -| Target include/exclude | Per target | `config.yaml` or CLI flags | Go [`filepath.Match`](https://pkg.go.dev/path/filepath#Match) glob | Sync | +| SKILL.md `metadata.targets` | Restricts skills to listed targets | Per skill frontmatter | YAML list | Sync (parsed at discovery) | +| Target include/exclude | Per target, per resource | `config.yaml` or CLI flags | Go [`filepath.Match`](https://pkg.go.dev/path/filepath#Match) glob | Sync | :::note Sync mode caveat All three layers only apply to **merge** and **copy** sync modes. @@ -29,7 +29,7 @@ A skill must pass **all** layers to reach a target: 1. **`.skillignore`** — evaluated at discovery. Matching skills never enter the sync pipeline. 2. **Target include/exclude** — evaluated at sync (`FilterSkills`). Skills are discovered but skipped for non-matching targets. -3. **SKILL.md `targets`** — evaluated at sync (`FilterSkillsByTarget`). Skills are restricted to their declared targets. +3. **SKILL.md `metadata.targets`** — evaluated at sync (`FilterSkillsByTarget`). Skills are restricted to their declared targets. ## .skillignore @@ -56,15 +56,15 @@ A skill must pass **all** layers to reach a target: **Format:** Top-level or nested under `metadata`: ```yaml -# Either format works -targets: [claude, cursor] - -# Or nested +# Preferred metadata: targets: [claude, cursor] + +# Legacy fallback +targets: [claude, cursor] ``` -**Behavior:** Whitelist — the skill only syncs to the listed targets. Omitting the field means sync to all targets. +**Behavior:** Whitelist — the skill only syncs to the listed targets. Omitting the field means sync to all targets. If both `metadata.targets` and top-level `targets` are present, `metadata.targets` wins. **Aliases:** Target names support aliases. `claude` matches a target configured as `claude-code`. See [Supported Targets](/docs/reference/targets/supported-targets). @@ -75,14 +75,20 @@ metadata: **Set via CLI:** ```bash +# Skills skillshare target claude --add-include "team-*" skillshare target cursor --add-exclude "legacy-*" skillshare target claude --remove-include "team-*" + +# Agents +skillshare target claude --add-agent-include "team-*" +skillshare target claude --add-agent-exclude "draft-*" +skillshare target claude --remove-agent-include "team-*" ``` -**Stored in:** `config.yaml` under `targets..include` / `targets..exclude`. +**Stored in:** `config.yaml` under `targets..include` / `targets..exclude` for skills, and `targets..agents.include` / `targets..agents.exclude` for agents. -**Syntax:** Go [`filepath.Match`](https://pkg.go.dev/path/filepath#Match) glob patterns matched against the flat skill name (e.g., `_team__frontend__ui`). +**Syntax:** Go [`filepath.Match`](https://pkg.go.dev/path/filepath#Match) glob patterns matched against the flat resource name. Skills use flat skill names (e.g., `_team__frontend__ui`); agents use flat `.md` filenames. | Supported | Not supported | |-----------|--------------| @@ -90,7 +96,7 @@ skillshare target claude --remove-include "team-*" | `?` (single char) | `{a,b}` (brace expansion) | | `[abc]` (char class) | | -**Precedence:** When both `include` and `exclude` are set, `include` is applied first, then `exclude`. A skill matching both is excluded. +**Precedence:** When both `include` and `exclude` are set, `include` is applied first, then `exclude`. A matching resource that hits both is excluded. **Visual editor:** `skillshare ui` → Targets page → "Customize filters" button. diff --git a/website/docs/reference/targets/configuration.md b/website/docs/reference/targets/configuration.md index 380ea170..f2e18c50 100644 --- a/website/docs/reference/targets/configuration.md +++ b/website/docs/reference/targets/configuration.md @@ -297,24 +297,32 @@ Result for `cursor`: Instead of editing YAML manually, use the `target` command: ```bash +# Skills skillshare target claude --add-include "team-*" skillshare target claude --add-exclude "_legacy*" skillshare target claude --remove-include "team-*" + +# Agents (only for targets with an agents path) +skillshare target claude --add-agent-include "team-*" +skillshare target claude --add-agent-exclude "draft-*" +skillshare target claude --remove-agent-include "team-*" + skillshare sync # Apply changes ``` -Duplicate patterns are silently ignored. Invalid glob patterns return an error. +Duplicate patterns are silently ignored. Invalid glob patterns return an error. Agent filters use the same glob syntax as skill filters, but only work in `merge` and `copy` modes. In `symlink` mode, agent filters are ignored because the entire agents directory is linked as one unit. See [target command](/docs/reference/commands/target#target-filters-includeexclude) for full reference. #### Skill-level targets {#skill-level-targets} -Skills can declare which targets they're compatible with using the `targets` field in SKILL.md: +Skills can declare which targets they're compatible with using `metadata.targets` in SKILL.md. A top-level `targets` field is still supported as a fallback for older skills, but `metadata.targets` takes precedence when both are present: ```yaml --- name: claude-prompts -targets: [claude] +metadata: + targets: [claude] --- ``` diff --git a/website/docs/understand/skill-format.md b/website/docs/understand/skill-format.md index 3d36c49c..78a4cffc 100644 --- a/website/docs/understand/skill-format.md +++ b/website/docs/understand/skill-format.md @@ -200,6 +200,8 @@ metadata: Currently, `targets` is the only `metadata` field that skillshare processes. Other fields (like `pattern`, `domain`, `interaction`) are preserved in the frontmatter but not used by skillshare — they may be consumed by other tools in the ecosystem. +For backward compatibility, skillshare also reads a top-level `targets` field. If both are present, `metadata.targets` takes precedence. + ## Custom Fields You can add any custom top-level fields: From 5836a8802f87c79116c625eb525f2f87297e1445 Mon Sep 17 00:00:00 2001 From: Willie Date: Thu, 9 Apr 2026 03:50:28 +0800 Subject: [PATCH 125/205] feat: add agent filter support to Filter Studio, targets CLI, and overview API Backend: - PATCH /api/targets/:name accepts agent_include/agent_exclude fields - POST /api/sync-matrix/preview returns agent entries with kind=agent - GET /api/overview returns agentsSource and extrasSource paths - Fix ResolveProjectTargets to resolve Agents sub-key (path + filters) - Add agent filter CLI flags: --add-agent-include, --add-agent-exclude, --remove-agent-include, --remove-agent-exclude, --agent-mode - Unify target list JSON/TUI to show agent info and filters Frontend: - FilterStudioPage: kind-driven single-context view (?kind=skill|agent) - KindBadge: use design system tokens (ss-badge + wobble in playful) - TargetsPage: link to Filter Studio with ?kind= param - ResourceDetailPage: capitalize type labels (Local, GitHub) - ResourcesPage: sync activeTab from URL searchParams on navigation - DashboardPage: show agents/extras source dirs, link skills card with ?tab=skills for reliable tab switching - API client: extend updateTarget and previewSyncMatrix for agent filters Tests: - Agent include/exclude round-trip persistence - Invalid agent pattern returns 400 - Preview with agent entries and kind field - No agent entries for targets without agent path - CLI target agent filter integration tests --- cmd/skillshare/target.go | 186 +++++-- cmd/skillshare/target_agents.go | 122 +++++ cmd/skillshare/target_agents_test.go | 458 ++++++++++++++++++ cmd/skillshare/target_helpers.go | 107 +++- cmd/skillshare/target_helpers_test.go | 93 +++- cmd/skillshare/target_list_tui.go | 366 ++++++++++++-- cmd/skillshare/target_list_tui_item.go | 9 +- cmd/skillshare/target_project.go | 158 ++++-- internal/config/config.go | 9 + internal/config/project.go | 36 +- internal/server/handler_helpers_test.go | 26 + internal/server/handler_overview.go | 18 +- internal/server/handler_overview_test.go | 30 ++ .../server/handler_resources_agents_test.go | 229 +++++++++ internal/server/handler_skills.go | 168 ++++--- internal/server/handler_sync_matrix.go | 48 +- internal/server/handler_sync_matrix_test.go | 112 +++++ internal/server/handler_targets.go | 25 +- .../server/handler_targets_agents_test.go | 213 ++++++++ internal/server/handler_targets_test.go | 67 +++ internal/server/handler_toggle.go | 37 +- internal/server/handler_update.go | 106 +++- internal/server/resource_agents.go | 50 ++ internal/targetsummary/agents.go | 185 +++++++ tests/integration/target_filter_test.go | 102 ++++ ui/src/api/client.ts | 45 +- ui/src/components/KindBadge.tsx | 18 +- ui/src/components/TargetMenu.tsx | 48 +- ui/src/pages/DashboardPage.tsx | 32 +- ui/src/pages/FilterStudioPage.tsx | 112 +++-- ui/src/pages/ResourceDetailPage.tsx | 51 +- ui/src/pages/ResourcesPage.tsx | 254 +++++++--- ui/src/pages/TargetsPage.tsx | 4 +- 33 files changed, 3117 insertions(+), 407 deletions(-) create mode 100644 cmd/skillshare/target_agents.go create mode 100644 cmd/skillshare/target_agents_test.go create mode 100644 internal/server/handler_resources_agents_test.go create mode 100644 internal/server/handler_targets_agents_test.go create mode 100644 internal/server/resource_agents.go create mode 100644 internal/targetsummary/agents.go diff --git a/cmd/skillshare/target.go b/cmd/skillshare/target.go index 38202afe..71afcca8 100644 --- a/cmd/skillshare/target.go +++ b/cmd/skillshare/target.go @@ -11,6 +11,7 @@ import ( "skillshare/internal/config" "skillshare/internal/oplog" "skillshare/internal/sync" + "skillshare/internal/targetsummary" "skillshare/internal/ui" "skillshare/internal/utils" "skillshare/internal/validate" @@ -113,11 +114,16 @@ Options: Target Settings: --mode Set sync mode (merge, symlink, or copy) + --agent-mode Set agents sync mode (merge, symlink, or copy) --target-naming Set target naming (flat or standard) --add-include Add an include filter pattern --add-exclude Add an exclude filter pattern --remove-include Remove an include filter pattern --remove-exclude Remove an exclude filter pattern + --add-agent-include Add an agent include filter pattern + --add-agent-exclude Add an agent exclude filter pattern + --remove-agent-include Remove an agent include filter pattern + --remove-agent-exclude Remove an agent exclude filter pattern Examples: skillshare target add cursor @@ -125,7 +131,9 @@ Examples: skillshare target remove cursor skillshare target list skillshare target cursor + skillshare target claude --agent-mode copy skillshare target claude --add-include "team-*" + skillshare target claude --add-agent-include "team-*" skillshare target claude --remove-include "team-*" skillshare target claude --add-exclude "_legacy*" @@ -434,11 +442,17 @@ func unlinkMergeMode(targetPath, sourcePath string) error { // targetListJSONItem is the JSON representation for a single target. type targetListJSONItem struct { - Name string `json:"name"` - Path string `json:"path"` - Mode string `json:"mode"` - Include []string `json:"include"` - Exclude []string `json:"exclude"` + Name string `json:"name"` + Path string `json:"path"` + Mode string `json:"mode"` + Include []string `json:"include"` + Exclude []string `json:"exclude"` + AgentPath string `json:"agentPath,omitempty"` + AgentMode string `json:"agentMode,omitempty"` + AgentInclude []string `json:"agentInclude,omitempty"` + AgentExclude []string `json:"agentExclude,omitempty"` + AgentLinkedCount *int `json:"agentLinkedCount,omitempty"` + AgentExpectedCount *int `json:"agentExpectedCount,omitempty"` } func targetList(jsonOutput bool) error { @@ -451,30 +465,39 @@ func targetList(jsonOutput bool) error { return targetListJSON(cfg) } - ui.Header("Configured Targets") - for name, target := range cfg.Targets { - sc := target.SkillsConfig() - mode := sc.Mode - if mode == "" { - mode = "merge" - } - fmt.Printf(" %-12s %s (%s)\n", name, sc.Path, mode) + items, err := buildTargetTUIItems(false, "") + if err != nil { + return err } + ui.Header("Configured Targets") + printTargetListPlain(items) + return nil } func targetListJSON(cfg *config.Config) error { + agentBuilder, err := targetsummary.NewGlobalBuilder(cfg) + if err != nil { + return err + } + var items []targetListJSONItem for name, target := range cfg.Targets { sc := target.SkillsConfig() - items = append(items, targetListJSONItem{ + item := targetListJSONItem{ Name: name, Path: sc.Path, Mode: getTargetMode(sc.Mode, cfg.Mode), Include: sc.Include, Exclude: sc.Exclude, - }) + } + agentSummary, err := agentBuilder.GlobalTarget(name, target) + if err != nil { + return err + } + applyTargetListAgentSummary(&item, agentSummary) + items = append(items, item) } output := struct { Targets []targetListJSONItem `json:"targets"` @@ -488,6 +511,10 @@ func targetInfo(name string, args []string) error { if err != nil { return err } + settings, err := parseTargetSettingFlags(remaining) + if err != nil { + return err + } cfg, err := config.Load() if err != nil { @@ -499,36 +526,59 @@ func targetInfo(name string, args []string) error { return fmt.Errorf("target '%s' not found. Use 'skillshare target list' to see available targets", name) } - // Parse --mode and --target-naming from remaining args - var newMode, newNaming string - for i := 0; i < len(remaining); i++ { - switch remaining[i] { - case "--mode", "-m": - if i+1 >= len(remaining) { - return fmt.Errorf("--mode requires a value (merge, symlink, or copy)") - } - newMode = remaining[i+1] - i++ - case "--target-naming": - if i+1 >= len(remaining) { - return fmt.Errorf("--target-naming requires a value (flat or standard)") - } - newNaming = remaining[i+1] - i++ - } - } - // Apply filter updates if any if filterOpts.hasUpdates() { start := time.Now() - s := target.EnsureSkills() - changes, fErr := applyFilterUpdates(&s.Include, &s.Exclude, filterOpts) - if fErr != nil { - return fErr + var changes []string + mutated := false + + if filterOpts.Skills.hasUpdates() { + s := target.EnsureSkills() + skillChanges, fErr := applyFilterUpdates(&s.Include, &s.Exclude, filterOpts.Skills) + if fErr != nil { + return fErr + } + changes = append(changes, skillChanges...) + mutated = true } - cfg.Targets[name] = target - if err := cfg.Save(); err != nil { - return err + + if filterOpts.Agents.hasUpdates() { + agentBuilder, buildErr := targetsummary.NewGlobalBuilder(cfg) + if buildErr != nil { + return buildErr + } + agentSummary, buildErr := agentBuilder.GlobalTarget(name, target) + if buildErr != nil { + return buildErr + } + if agentSummary == nil { + return fmt.Errorf("target '%s' does not have an agents path", name) + } + if agentSummary.Mode == "symlink" { + return fmt.Errorf("target '%s' agent include/exclude filters are ignored in symlink mode; use --agent-mode merge or --agent-mode copy first", name) + } + + ac := target.AgentsConfig() + include := append([]string(nil), ac.Include...) + exclude := append([]string(nil), ac.Exclude...) + agentChanges, fErr := applyFilterUpdates(&include, &exclude, filterOpts.Agents) + if fErr != nil { + return fErr + } + if len(agentChanges) > 0 { + a := target.EnsureAgents() + a.Include = include + a.Exclude = exclude + mutated = true + } + changes = append(changes, scopeFilterChanges("agents", agentChanges)...) + } + + if mutated { + cfg.Targets[name] = target + if err := cfg.Save(); err != nil { + return err + } } for _, change := range changes { ui.Success("%s: %s", name, change) @@ -548,13 +598,17 @@ func targetInfo(name string, args []string) error { } // If --mode is provided, update the mode - if newMode != "" { - return updateTargetMode(cfg, name, target, newMode) + if settings.SkillMode != "" { + return updateTargetMode(cfg, name, target, settings.SkillMode) + } + + if settings.AgentMode != "" { + return updateTargetAgentMode(cfg, name, target, settings.AgentMode) } // If --target-naming is provided, update the naming - if newNaming != "" { - return updateTargetNaming(cfg, name, target, newNaming) + if settings.Naming != "" { + return updateTargetNaming(cfg, name, target, settings.Naming) } // Show target info @@ -586,6 +640,38 @@ func updateTargetMode(cfg *config.Config, name string, target config.TargetConfi return nil } +func updateTargetAgentMode(cfg *config.Config, name string, target config.TargetConfig, newMode string) error { + if newMode != "merge" && newMode != "symlink" && newMode != "copy" { + return fmt.Errorf("invalid agent mode '%s'. Use 'merge', 'symlink', or 'copy'", newMode) + } + + agentBuilder, err := targetsummary.NewGlobalBuilder(cfg) + if err != nil { + return err + } + agentSummary, err := agentBuilder.GlobalTarget(name, target) + if err != nil { + return err + } + if agentSummary == nil { + return fmt.Errorf("target '%s' does not have an agents path", name) + } + + oldMode := agentSummary.Mode + target.EnsureAgents().Mode = newMode + cfg.Targets[name] = target + if err := cfg.Save(); err != nil { + return err + } + + ui.Success("Changed %s agent mode: %s -> %s", name, oldMode, newMode) + if newMode == "symlink" && (len(agentSummary.Include) > 0 || len(agentSummary.Exclude) > 0) { + ui.Warning("Agent include/exclude filters are ignored in symlink mode") + } + ui.Info("Run 'skillshare sync' to apply the new mode") + return nil +} + func updateTargetNaming(cfg *config.Config, name string, target config.TargetConfig, newNaming string) error { if !config.IsValidTargetNaming(newNaming) { return fmt.Errorf("invalid target naming '%s'. Use 'flat' or 'standard'", newNaming) @@ -636,6 +722,15 @@ func showTargetInfo(cfg *config.Config, name string, target config.TargetConfig) namingDisplay += " (default)" } + agentBuilder, err := targetsummary.NewGlobalBuilder(cfg) + if err != nil { + return err + } + agentSummary, err := agentBuilder.GlobalTarget(name, target) + if err != nil { + return err + } + ui.Header(fmt.Sprintf("Target: %s", name)) fmt.Printf(" Path: %s\n", sc.Path) fmt.Printf(" Mode: %s\n", modeDisplay) @@ -643,6 +738,7 @@ func showTargetInfo(cfg *config.Config, name string, target config.TargetConfig) fmt.Printf(" Status: %s\n", statusLine) fmt.Printf(" Include: %s\n", formatFilterList(sc.Include)) fmt.Printf(" Exclude: %s\n", formatFilterList(sc.Exclude)) + printTargetAgentSection(agentSummary) return nil } diff --git a/cmd/skillshare/target_agents.go b/cmd/skillshare/target_agents.go new file mode 100644 index 00000000..91c7218a --- /dev/null +++ b/cmd/skillshare/target_agents.go @@ -0,0 +1,122 @@ +package main + +import ( + "fmt" + + "skillshare/internal/config" + "skillshare/internal/sync" + "skillshare/internal/targetsummary" +) + +func applyTargetListAgentSummary(item *targetListJSONItem, summary *targetsummary.AgentSummary) { + if summary == nil { + return + } + + item.AgentPath = summary.Path + item.AgentMode = summary.Mode + item.AgentInclude = append([]string(nil), summary.Include...) + item.AgentExclude = append([]string(nil), summary.Exclude...) + item.AgentLinkedCount = intPtr(summary.ManagedCount) + item.AgentExpectedCount = intPtr(summary.ExpectedCount) +} + +func printTargetAgentSection(summary *targetsummary.AgentSummary) { + if summary == nil { + return + } + + displayPath := summary.DisplayPath + if displayPath == "" { + displayPath = summary.Path + } + + fmt.Println(" Agents:") + fmt.Printf(" Path: %s\n", displayPath) + fmt.Printf(" Mode: %s\n", summary.Mode) + fmt.Printf(" Status: %s\n", formatTargetAgentSyncSummary(summary)) + if summary.Mode == "symlink" { + fmt.Println(" Filters: ignored in symlink mode") + return + } + fmt.Printf(" Include: %s\n", formatFilterList(summary.Include)) + fmt.Printf(" Exclude: %s\n", formatFilterList(summary.Exclude)) +} + +func printTargetListPlain(items []targetTUIItem) { + for idx, item := range items { + if idx > 0 { + fmt.Println() + } + + sc := item.target.SkillsConfig() + displayPath := item.displayPath + if displayPath == "" { + displayPath = sc.Path + } + + fmt.Printf(" %s\n", item.name) + fmt.Println(" Skills:") + fmt.Printf(" Path: %s\n", displayPath) + fmt.Printf(" Mode: %s\n", sync.EffectiveMode(sc.Mode)) + fmt.Printf(" Naming: %s\n", config.EffectiveTargetNaming(sc.TargetNaming)) + fmt.Printf(" Sync: %s\n", item.skillSync) + if len(sc.Include) == 0 && len(sc.Exclude) == 0 { + fmt.Println(" No include/exclude filters") + } else { + fmt.Printf(" Include: %s\n", formatFilterList(sc.Include)) + fmt.Printf(" Exclude: %s\n", formatFilterList(sc.Exclude)) + } + + if item.agentSummary == nil { + continue + } + + agentPath := item.agentSummary.DisplayPath + if agentPath == "" { + agentPath = item.agentSummary.Path + } + fmt.Println(" Agents:") + fmt.Printf(" Path: %s\n", agentPath) + fmt.Printf(" Mode: %s\n", item.agentSummary.Mode) + fmt.Printf(" Sync: %s\n", formatTargetAgentSyncSummary(item.agentSummary)) + if item.agentSummary.Mode == "symlink" { + fmt.Println(" Filters: ignored in symlink mode") + } else if len(item.agentSummary.Include) == 0 && len(item.agentSummary.Exclude) == 0 { + fmt.Println(" No agent include/exclude filters") + } else { + fmt.Printf(" Include: %s\n", formatFilterList(item.agentSummary.Include)) + fmt.Printf(" Exclude: %s\n", formatFilterList(item.agentSummary.Exclude)) + } + } +} + +func formatTargetAgentSyncSummary(summary *targetsummary.AgentSummary) string { + if summary == nil { + return "" + } + + if summary.ExpectedCount == 0 { + if summary.ManagedCount > 0 { + return fmt.Sprintf("no source agents yet (%d %s)", summary.ManagedCount, targetAgentCountLabel(summary.Mode)) + } + return "no source agents yet" + } + + summaryText := fmt.Sprintf("%d/%d %s", summary.ManagedCount, summary.ExpectedCount, targetAgentCountLabel(summary.Mode)) + if summary.Mode == "symlink" { + return summaryText + " (directory symlink)" + } + return summaryText +} + +func targetAgentCountLabel(mode string) string { + if mode == "copy" { + return "managed" + } + return "linked" +} + +func intPtr(v int) *int { + return &v +} diff --git a/cmd/skillshare/target_agents_test.go b/cmd/skillshare/target_agents_test.go new file mode 100644 index 00000000..c0c007b6 --- /dev/null +++ b/cmd/skillshare/target_agents_test.go @@ -0,0 +1,458 @@ +package main + +import ( + "encoding/json" + "os" + "path/filepath" + "strings" + "testing" + + "skillshare/internal/config" + "skillshare/internal/targetsummary" + "skillshare/internal/testutil" +) + +func TestShowTargetInfo_ShowsAgentsSectionForBuiltinTarget(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + targetPath := sb.CreateTarget("claude") + writeGlobalTargetConfig(t, sb, "claude", targetPath, "") + + cfg, err := config.Load() + if err != nil { + t.Fatalf("load config: %v", err) + } + + agentSource := cfg.EffectiveAgentsSource() + agentFile := writeAgentFile(t, agentSource, "reviewer.md") + agentTarget := filepath.Join(sb.Home, ".claude", "agents") + linkAgentFile(t, agentTarget, "reviewer.md", agentFile) + + output := stripANSIWarnings(captureStdout(t, func() { + if err := showTargetInfo(cfg, "claude", cfg.Targets["claude"]); err != nil { + t.Fatalf("showTargetInfo: %v", err) + } + })) + + if !strings.Contains(output, "Agents:") { + t.Fatalf("expected agents section in output:\n%s", output) + } + if !strings.Contains(output, agentTarget) { + t.Fatalf("expected agent path %q in output:\n%s", agentTarget, output) + } + if !strings.Contains(output, "1/1 linked") { + t.Fatalf("expected linked summary in output:\n%s", output) + } +} + +func TestShowTargetInfo_OmitsAgentsSectionWhenTargetHasNoAgentsPath(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + targetPath := filepath.Join(sb.Root, "custom-skills") + if err := os.MkdirAll(targetPath, 0755); err != nil { + t.Fatalf("mkdir target: %v", err) + } + writeGlobalTargetConfig(t, sb, "custom-tool", targetPath, "") + + cfg, err := config.Load() + if err != nil { + t.Fatalf("load config: %v", err) + } + + output := stripANSIWarnings(captureStdout(t, func() { + if err := showTargetInfo(cfg, "custom-tool", cfg.Targets["custom-tool"]); err != nil { + t.Fatalf("showTargetInfo: %v", err) + } + })) + + if strings.Contains(output, "Agents:") { + t.Fatalf("did not expect agents section:\n%s", output) + } +} + +func TestTargetListJSON_IncludesAgentMetadata(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + targetPath := sb.CreateTarget("claude") + writeGlobalTargetConfig(t, sb, "claude", targetPath, "") + + cfg, err := config.Load() + if err != nil { + t.Fatalf("load config: %v", err) + } + + agentSource := cfg.EffectiveAgentsSource() + agentFile := writeAgentFile(t, agentSource, "reviewer.md") + agentTarget := filepath.Join(sb.Home, ".claude", "agents") + linkAgentFile(t, agentTarget, "reviewer.md", agentFile) + + output := captureStdout(t, func() { + if err := targetListJSON(cfg); err != nil { + t.Fatalf("targetListJSON: %v", err) + } + }) + + var resp struct { + Targets []targetListJSONItem `json:"targets"` + } + if err := json.Unmarshal([]byte(output), &resp); err != nil { + t.Fatalf("decode json: %v\n%s", err, output) + } + if len(resp.Targets) != 1 { + t.Fatalf("expected 1 target, got %d", len(resp.Targets)) + } + + target := resp.Targets[0] + if target.AgentPath != agentTarget { + t.Fatalf("agent path = %q, want %q", target.AgentPath, agentTarget) + } + if target.AgentMode != "merge" { + t.Fatalf("agent mode = %q, want merge", target.AgentMode) + } + if target.AgentLinkedCount == nil || *target.AgentLinkedCount != 1 { + t.Fatalf("agent linked = %v, want 1", target.AgentLinkedCount) + } + if target.AgentExpectedCount == nil || *target.AgentExpectedCount != 1 { + t.Fatalf("agent expected = %v, want 1", target.AgentExpectedCount) + } +} + +func TestTargetList_TextOutputShowsSkillsAndAgentsSections(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + targetPath := sb.CreateTarget("claude") + writeGlobalTargetConfig(t, sb, "claude", targetPath, "") + + cfg, err := config.Load() + if err != nil { + t.Fatalf("load config: %v", err) + } + + agentSource := cfg.EffectiveAgentsSource() + agentFile := writeAgentFile(t, agentSource, "reviewer.md") + agentTarget := filepath.Join(sb.Home, ".claude", "agents") + linkAgentFile(t, agentTarget, "reviewer.md", agentFile) + + output := stripANSIWarnings(captureStdout(t, func() { + if err := targetList(false); err != nil { + t.Fatalf("targetList: %v", err) + } + })) + + for _, want := range []string{ + "claude", + "Skills:", + targetPath, + "Sync:", + "Agents:", + agentTarget, + "1/1 linked", + "No include/exclude filters", + "No agent include/exclude filters", + } { + if !strings.Contains(output, want) { + t.Fatalf("expected %q in target list output:\n%s", want, output) + } + } +} + +func TestRenderTargetDetail_AgentSection(t *testing.T) { + cases := []struct { + name string + item targetTUIItem + want []string + notContains []string + }{ + { + name: "merge target shows builtin-like agents section", + item: targetTUIItem{ + name: "cursor", + displayPath: ".cursor/skills", + skillSync: "merged (4 shared, 1 local)", + target: config.TargetConfig{ + Skills: &config.ResourceTargetConfig{ + Path: "/tmp/cursor/skills", + Mode: "merge", + TargetNaming: "flat", + }, + }, + agentSummary: &targetsummary.AgentSummary{ + DisplayPath: ".cursor/agents", + Path: "/tmp/cursor/agents", + Mode: "merge", + ManagedCount: 2, + ExpectedCount: 3, + Include: []string{"team-*"}, + }, + }, + want: []string{"Skills:", ".cursor/skills", "Sync:", "merged (4 shared, 1 local)", "Agents:", ".cursor/agents", "2/3 linked", "Agent Include:", "team-*"}, + }, + { + name: "copy target shows custom agents section", + item: targetTUIItem{ + name: "custom", + displayPath: "/tmp/custom/skills", + skillSync: "copied (2 managed, 0 local)", + target: config.TargetConfig{ + Skills: &config.ResourceTargetConfig{ + Path: "/tmp/custom/skills", + Mode: "copy", + TargetNaming: "flat", + }, + }, + agentSummary: &targetsummary.AgentSummary{ + DisplayPath: "/tmp/custom/agents", + Path: "/tmp/custom/agents", + Mode: "copy", + ManagedCount: 2, + ExpectedCount: 2, + }, + }, + want: []string{"Skills:", "/tmp/custom/skills", "Sync:", "copied (2 managed, 0 local)", "Agents:", "/tmp/custom/agents", "2/2 managed", "No agent include/exclude filters"}, + }, + { + name: "symlink agent target shows filters ignored warning", + item: targetTUIItem{ + name: "claude", + displayPath: ".claude/skills", + skillSync: "merged (7 shared, 0 local)", + target: config.TargetConfig{ + Skills: &config.ResourceTargetConfig{ + Path: "/tmp/claude/skills", + Mode: "merge", + TargetNaming: "flat", + }, + }, + agentSummary: &targetsummary.AgentSummary{ + DisplayPath: ".claude/agents", + Path: "/tmp/claude/agents", + Mode: "symlink", + ManagedCount: 5, + ExpectedCount: 5, + Include: []string{"team-*"}, + Exclude: []string{"draft-*"}, + }, + }, + want: []string{"Agents:", ".claude/agents", "5/5 linked (directory symlink)", "Agent include/exclude filters ignored in symlink mode"}, + notContains: []string{"Agent Include:", "Agent Exclude:", "No agent include/exclude filters"}, + }, + { + name: "unsupported target omits agents section", + item: targetTUIItem{ + name: "custom-tool", + displayPath: "/tmp/custom-tool/skills", + skillSync: "not exist (0 shared, 0 local)", + target: config.TargetConfig{ + Skills: &config.ResourceTargetConfig{ + Path: "/tmp/custom-tool/skills", + Mode: "merge", + TargetNaming: "flat", + }, + }, + }, + want: []string{"Skills:", "/tmp/custom-tool/skills", "Sync:", "not exist (0 shared, 0 local)"}, + notContains: []string{"Agents:", "No agent include/exclude filters"}, + }, + } + + model := targetListTUIModel{} + for _, tc := range cases { + t.Run(tc.name, func(t *testing.T) { + rendered := stripANSIWarnings(model.renderTargetDetail(tc.item)) + for _, want := range tc.want { + if !strings.Contains(rendered, want) { + t.Fatalf("expected %q in output:\n%s", want, rendered) + } + } + for _, unwanted := range tc.notContains { + if strings.Contains(rendered, unwanted) { + t.Fatalf("did not expect %q in output:\n%s", unwanted, rendered) + } + } + }) + } +} + +func TestTargetScopeOptions_DisablesAgentFiltersInSymlinkMode(t *testing.T) { + item := targetTUIItem{ + name: "claude", + agentSummary: &targetsummary.AgentSummary{ + Mode: "symlink", + }, + } + + options := targetScopeOptions(item, "include") + if len(options) != 2 { + t.Fatalf("expected 2 scope options, got %d", len(options)) + } + if !options[0].enabled || options[0].scope != "skills" { + t.Fatalf("expected skills option enabled, got %+v", options[0]) + } + if options[1].scope != "agents" || options[1].enabled { + t.Fatalf("expected agents option disabled, got %+v", options[1]) + } + if options[1].disabled != "ignored in symlink mode" { + t.Fatalf("unexpected disabled reason: %+v", options[1]) + } + + if got := moveScopePickerCursor(options, 0, 1); got != 0 { + t.Fatalf("cursor should stay on skills when agents is disabled, got %d", got) + } +} + +func TestDoSetTargetMode_Agents_GlobalAndProject(t *testing.T) { + t.Run("global", func(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + targetPath := sb.CreateTarget("claude") + writeGlobalTargetConfig(t, sb, "claude", targetPath, "") + + model := targetListTUIModel{} + if _, err := model.doSetTargetMode("claude", "agents", "copy"); err != nil { + t.Fatalf("doSetTargetMode: %v", err) + } + + cfg, err := config.Load() + if err != nil { + t.Fatalf("load config: %v", err) + } + target := cfg.Targets["claude"] + if got := target.AgentsConfig().Mode; got != "copy" { + t.Fatalf("agent mode = %q, want copy", got) + } + }) + + t.Run("project", func(t *testing.T) { + root := writeProjectTargetConfig(t, []config.ProjectTargetEntry{{Name: "claude"}}) + + model := targetListTUIModel{ + projCfg: &config.ProjectConfig{}, + cwd: root, + } + if _, err := model.doSetTargetMode("claude", "agents", "copy"); err != nil { + t.Fatalf("doSetTargetMode: %v", err) + } + + cfg, err := config.LoadProject(root) + if err != nil { + t.Fatalf("load project config: %v", err) + } + if got := cfg.Targets[0].AgentsConfig().Mode; got != "copy" { + t.Fatalf("project agent mode = %q, want copy", got) + } + }) +} + +func TestDoAddAndRemovePattern_Agents_GlobalAndProject(t *testing.T) { + t.Run("global", func(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + targetPath := sb.CreateTarget("claude") + writeGlobalTargetConfig(t, sb, "claude", targetPath, "") + + model := targetListTUIModel{} + if _, err := model.doAddPattern("claude", "agents", "include", "team-*"); err != nil { + t.Fatalf("doAddPattern: %v", err) + } + if _, err := model.doRemovePattern("claude", "agents", "include", "team-*"); err != nil { + t.Fatalf("doRemovePattern: %v", err) + } + + cfg, err := config.Load() + if err != nil { + t.Fatalf("load config: %v", err) + } + target := cfg.Targets["claude"] + if got := target.AgentsConfig().Include; len(got) != 0 { + t.Fatalf("agent include = %v, want empty", got) + } + }) + + t.Run("project", func(t *testing.T) { + root := writeProjectTargetConfig(t, []config.ProjectTargetEntry{{Name: "claude"}}) + + model := targetListTUIModel{ + projCfg: &config.ProjectConfig{}, + cwd: root, + } + if _, err := model.doAddPattern("claude", "agents", "exclude", "draft-*"); err != nil { + t.Fatalf("doAddPattern: %v", err) + } + if _, err := model.doRemovePattern("claude", "agents", "exclude", "draft-*"); err != nil { + t.Fatalf("doRemovePattern: %v", err) + } + + cfg, err := config.LoadProject(root) + if err != nil { + t.Fatalf("load project config: %v", err) + } + if got := cfg.Targets[0].AgentsConfig().Exclude; len(got) != 0 { + t.Fatalf("project agent exclude = %v, want empty", got) + } + }) +} + +func writeGlobalTargetConfig(t *testing.T, sb *testutil.Sandbox, name, skillsPath, agentPath string) { + t.Helper() + + var b strings.Builder + b.WriteString("source: " + sb.SourcePath + "\n") + b.WriteString("mode: merge\n") + b.WriteString("targets:\n") + b.WriteString(" " + name + ":\n") + b.WriteString(" skills:\n") + b.WriteString(" path: " + skillsPath + "\n") + if agentPath != "" { + b.WriteString(" agents:\n") + b.WriteString(" path: " + agentPath + "\n") + } + sb.WriteConfig(b.String()) +} + +func writeAgentFile(t *testing.T, dir, name string) string { + t.Helper() + if err := os.MkdirAll(dir, 0755); err != nil { + t.Fatalf("mkdir agent source: %v", err) + } + path := filepath.Join(dir, name) + if err := os.WriteFile(path, []byte("# "+name), 0644); err != nil { + t.Fatalf("write agent file: %v", err) + } + return path +} + +func linkAgentFile(t *testing.T, dir, name, source string) string { + t.Helper() + if err := os.MkdirAll(dir, 0755); err != nil { + t.Fatalf("mkdir agent target: %v", err) + } + linkPath := filepath.Join(dir, name) + if err := os.Symlink(source, linkPath); err != nil { + t.Fatalf("symlink agent: %v", err) + } + return linkPath +} + +func writeProjectTargetConfig(t *testing.T, targets []config.ProjectTargetEntry) string { + t.Helper() + + root := t.TempDir() + if err := os.MkdirAll(filepath.Join(root, ".skillshare", "skills"), 0755); err != nil { + t.Fatalf("mkdir project skills: %v", err) + } + if err := os.MkdirAll(filepath.Join(root, ".skillshare", "agents"), 0755); err != nil { + t.Fatalf("mkdir project agents: %v", err) + } + + cfg := &config.ProjectConfig{Targets: targets} + if err := cfg.Save(root); err != nil { + t.Fatalf("save project config: %v", err) + } + return root +} diff --git a/cmd/skillshare/target_helpers.go b/cmd/skillshare/target_helpers.go index cd95f94a..ba33fe8f 100644 --- a/cmd/skillshare/target_helpers.go +++ b/cmd/skillshare/target_helpers.go @@ -32,11 +32,26 @@ func (o filterUpdateOpts) hasUpdates() bool { len(o.RemoveInclude) > 0 || len(o.RemoveExclude) > 0 } +type parsedTargetFilterFlags struct { + Skills filterUpdateOpts + Agents filterUpdateOpts +} + +func (o parsedTargetFilterFlags) hasUpdates() bool { + return o.Skills.hasUpdates() || o.Agents.hasUpdates() +} + +type parsedTargetSettingFlags struct { + SkillMode string + AgentMode string + Naming string +} + // parseFilterFlags extracts --add-include, --add-exclude, --remove-include, -// --remove-exclude flags from args. Returns the parsed opts and any -// remaining (non-filter) arguments. -func parseFilterFlags(args []string) (filterUpdateOpts, []string, error) { - var opts filterUpdateOpts +// --remove-exclude flags from args for both skills and agents. +// Returns the parsed opts and any remaining (non-filter) arguments. +func parseFilterFlags(args []string) (parsedTargetFilterFlags, []string, error) { + var opts parsedTargetFilterFlags var rest []string for i := 0; i < len(args); i++ { @@ -46,25 +61,49 @@ func parseFilterFlags(args []string) (filterUpdateOpts, []string, error) { return opts, nil, fmt.Errorf("--add-include requires a value") } i++ - opts.AddInclude = append(opts.AddInclude, args[i]) + opts.Skills.AddInclude = append(opts.Skills.AddInclude, args[i]) case "--add-exclude": if i+1 >= len(args) { return opts, nil, fmt.Errorf("--add-exclude requires a value") } i++ - opts.AddExclude = append(opts.AddExclude, args[i]) + opts.Skills.AddExclude = append(opts.Skills.AddExclude, args[i]) case "--remove-include": if i+1 >= len(args) { return opts, nil, fmt.Errorf("--remove-include requires a value") } i++ - opts.RemoveInclude = append(opts.RemoveInclude, args[i]) + opts.Skills.RemoveInclude = append(opts.Skills.RemoveInclude, args[i]) case "--remove-exclude": if i+1 >= len(args) { return opts, nil, fmt.Errorf("--remove-exclude requires a value") } i++ - opts.RemoveExclude = append(opts.RemoveExclude, args[i]) + opts.Skills.RemoveExclude = append(opts.Skills.RemoveExclude, args[i]) + case "--add-agent-include": + if i+1 >= len(args) { + return opts, nil, fmt.Errorf("--add-agent-include requires a value") + } + i++ + opts.Agents.AddInclude = append(opts.Agents.AddInclude, args[i]) + case "--add-agent-exclude": + if i+1 >= len(args) { + return opts, nil, fmt.Errorf("--add-agent-exclude requires a value") + } + i++ + opts.Agents.AddExclude = append(opts.Agents.AddExclude, args[i]) + case "--remove-agent-include": + if i+1 >= len(args) { + return opts, nil, fmt.Errorf("--remove-agent-include requires a value") + } + i++ + opts.Agents.RemoveInclude = append(opts.Agents.RemoveInclude, args[i]) + case "--remove-agent-exclude": + if i+1 >= len(args) { + return opts, nil, fmt.Errorf("--remove-agent-exclude requires a value") + } + i++ + opts.Agents.RemoveExclude = append(opts.Agents.RemoveExclude, args[i]) default: rest = append(rest, args[i]) } @@ -73,6 +112,35 @@ func parseFilterFlags(args []string) (filterUpdateOpts, []string, error) { return opts, rest, nil } +func parseTargetSettingFlags(args []string) (parsedTargetSettingFlags, error) { + var settings parsedTargetSettingFlags + + for i := 0; i < len(args); i++ { + switch args[i] { + case "--mode", "-m": + if i+1 >= len(args) { + return settings, fmt.Errorf("--mode requires a value (merge, symlink, or copy)") + } + settings.SkillMode = args[i+1] + i++ + case "--agent-mode": + if i+1 >= len(args) { + return settings, fmt.Errorf("--agent-mode requires a value (merge, symlink, or copy)") + } + settings.AgentMode = args[i+1] + i++ + case "--target-naming": + if i+1 >= len(args) { + return settings, fmt.Errorf("--target-naming requires a value (flat or standard)") + } + settings.Naming = args[i+1] + i++ + } + } + + return settings, nil +} + // applyFilterUpdates modifies include/exclude slices according to opts. // It validates patterns with filepath.Match, deduplicates, and returns // a human-readable list of changes applied. @@ -120,6 +188,29 @@ func applyFilterUpdates(include, exclude *[]string, opts filterUpdateOpts) ([]st return changes, nil } +func scopeFilterChanges(scope string, changes []string) []string { + if scope != "agents" { + return changes + } + + scoped := make([]string, len(changes)) + for i, change := range changes { + switch { + case strings.HasPrefix(change, "added include: "): + scoped[i] = strings.Replace(change, "added include: ", "added agent include: ", 1) + case strings.HasPrefix(change, "added exclude: "): + scoped[i] = strings.Replace(change, "added exclude: ", "added agent exclude: ", 1) + case strings.HasPrefix(change, "removed include: "): + scoped[i] = strings.Replace(change, "removed include: ", "removed agent include: ", 1) + case strings.HasPrefix(change, "removed exclude: "): + scoped[i] = strings.Replace(change, "removed exclude: ", "removed agent exclude: ", 1) + default: + scoped[i] = change + } + } + return scoped +} + func containsPattern(patterns []string, p string) bool { for _, existing := range patterns { if existing == p { diff --git a/cmd/skillshare/target_helpers_test.go b/cmd/skillshare/target_helpers_test.go index bf3d6477..181f6d97 100644 --- a/cmd/skillshare/target_helpers_test.go +++ b/cmd/skillshare/target_helpers_test.go @@ -10,14 +10,14 @@ func TestParseFilterFlags(t *testing.T) { tests := []struct { name string args []string - wantOpts filterUpdateOpts + wantOpts parsedTargetFilterFlags wantRest []string wantErr bool }{ { name: "no flags", args: []string{"--mode", "merge"}, - wantOpts: filterUpdateOpts{}, + wantOpts: parsedTargetFilterFlags{}, wantRest: []string{"--mode", "merge"}, }, { @@ -28,11 +28,13 @@ func TestParseFilterFlags(t *testing.T) { "--remove-include", "old-*", "--remove-exclude", "test-*", }, - wantOpts: filterUpdateOpts{ - AddInclude: []string{"team-*"}, - AddExclude: []string{"_legacy*"}, - RemoveInclude: []string{"old-*"}, - RemoveExclude: []string{"test-*"}, + wantOpts: parsedTargetFilterFlags{ + Skills: filterUpdateOpts{ + AddInclude: []string{"team-*"}, + AddExclude: []string{"_legacy*"}, + RemoveInclude: []string{"old-*"}, + RemoveExclude: []string{"test-*"}, + }, }, }, { @@ -41,8 +43,23 @@ func TestParseFilterFlags(t *testing.T) { "--add-include", "a-*", "--add-include", "b-*", }, - wantOpts: filterUpdateOpts{ - AddInclude: []string{"a-*", "b-*"}, + wantOpts: parsedTargetFilterFlags{ + Skills: filterUpdateOpts{ + AddInclude: []string{"a-*", "b-*"}, + }, + }, + }, + { + name: "agent flags", + args: []string{ + "--add-agent-include", "team-*", + "--remove-agent-exclude", "draft-*", + }, + wantOpts: parsedTargetFilterFlags{ + Agents: filterUpdateOpts{ + AddInclude: []string{"team-*"}, + RemoveExclude: []string{"draft-*"}, + }, }, }, { @@ -50,9 +67,15 @@ func TestParseFilterFlags(t *testing.T) { args: []string{ "--mode", "merge", "--add-include", "team-*", + "--add-agent-exclude", "draft-*", }, - wantOpts: filterUpdateOpts{ - AddInclude: []string{"team-*"}, + wantOpts: parsedTargetFilterFlags{ + Skills: filterUpdateOpts{ + AddInclude: []string{"team-*"}, + }, + Agents: filterUpdateOpts{ + AddExclude: []string{"draft-*"}, + }, }, wantRest: []string{"--mode", "merge"}, }, @@ -91,15 +114,39 @@ func TestParseFilterFlags(t *testing.T) { t.Fatalf("unexpected error: %v", err) } - assertStringSlice(t, "AddInclude", opts.AddInclude, tt.wantOpts.AddInclude) - assertStringSlice(t, "AddExclude", opts.AddExclude, tt.wantOpts.AddExclude) - assertStringSlice(t, "RemoveInclude", opts.RemoveInclude, tt.wantOpts.RemoveInclude) - assertStringSlice(t, "RemoveExclude", opts.RemoveExclude, tt.wantOpts.RemoveExclude) + assertStringSlice(t, "Skills.AddInclude", opts.Skills.AddInclude, tt.wantOpts.Skills.AddInclude) + assertStringSlice(t, "Skills.AddExclude", opts.Skills.AddExclude, tt.wantOpts.Skills.AddExclude) + assertStringSlice(t, "Skills.RemoveInclude", opts.Skills.RemoveInclude, tt.wantOpts.Skills.RemoveInclude) + assertStringSlice(t, "Skills.RemoveExclude", opts.Skills.RemoveExclude, tt.wantOpts.Skills.RemoveExclude) + assertStringSlice(t, "Agents.AddInclude", opts.Agents.AddInclude, tt.wantOpts.Agents.AddInclude) + assertStringSlice(t, "Agents.AddExclude", opts.Agents.AddExclude, tt.wantOpts.Agents.AddExclude) + assertStringSlice(t, "Agents.RemoveInclude", opts.Agents.RemoveInclude, tt.wantOpts.Agents.RemoveInclude) + assertStringSlice(t, "Agents.RemoveExclude", opts.Agents.RemoveExclude, tt.wantOpts.Agents.RemoveExclude) assertStringSlice(t, "rest", rest, tt.wantRest) }) } } +func TestParseTargetSettingFlags(t *testing.T) { + settings, err := parseTargetSettingFlags([]string{ + "--mode", "copy", + "--agent-mode", "merge", + "--target-naming", "standard", + }) + if err != nil { + t.Fatalf("parseTargetSettingFlags: %v", err) + } + if settings.SkillMode != "copy" { + t.Fatalf("SkillMode = %q, want copy", settings.SkillMode) + } + if settings.AgentMode != "merge" { + t.Fatalf("AgentMode = %q, want merge", settings.AgentMode) + } + if settings.Naming != "standard" { + t.Fatalf("Naming = %q, want standard", settings.Naming) + } +} + func TestApplyFilterUpdates(t *testing.T) { tests := []struct { name string @@ -225,6 +272,22 @@ func TestFilterUpdateOpts_HasUpdates(t *testing.T) { } } +func TestScopeFilterChanges_Agents(t *testing.T) { + changes := scopeFilterChanges("agents", []string{ + "added include: team-*", + "added exclude: draft-*", + "removed include: team-*", + "removed exclude: draft-*", + }) + want := []string{ + "added agent include: team-*", + "added agent exclude: draft-*", + "removed agent include: team-*", + "removed agent exclude: draft-*", + } + assertStringSlice(t, "changes", changes, want) +} + func TestFindUnknownSkillTargets_CustomTargets(t *testing.T) { discovered := []ssync.DiscoveredSkill{ {RelPath: "skill-a", Targets: []string{"claude", "custom-tool"}}, diff --git a/cmd/skillshare/target_list_tui.go b/cmd/skillshare/target_list_tui.go index 37dfe193..1b097ec4 100644 --- a/cmd/skillshare/target_list_tui.go +++ b/cmd/skillshare/target_list_tui.go @@ -2,11 +2,13 @@ package main import ( "fmt" + "path/filepath" "sort" "strings" "skillshare/internal/config" "skillshare/internal/sync" + "skillshare/internal/targetsummary" "github.com/charmbracelet/bubbles/list" "github.com/charmbracelet/bubbles/spinner" @@ -59,6 +61,7 @@ type targetListTUIModel struct { // Mode picker overlay showModePicker bool modePickerTarget string // target name being edited + modePickerScope string // "skills" or "agents" modeCursor int // Naming picker overlay @@ -70,6 +73,7 @@ type targetListTUIModel struct { editingFilter bool // true when in I/E edit mode editFilterType string // "include" or "exclude" editFilterTarget string // target name being edited + editFilterScope string // "skills" or "agents" editPatterns []string editCursor int // selected pattern index editAdding bool @@ -79,6 +83,12 @@ type targetListTUIModel struct { confirming bool confirmTarget string + // Scope picker overlay for M/I/E when both skills and agents are available. + showScopePicker bool + scopePickerTarget string + scopePickerAction string // "mode", "include", "exclude" + scopePickerCursor int + // Exit-with-action (for destructive ops dispatched after TUI exit) action string // "remove" or "" (normal quit) @@ -86,6 +96,12 @@ type targetListTUIModel struct { lastActionMsg string } +type targetScopeOption struct { + scope string + enabled bool + disabled string +} + func newTargetListTUIModel( modeLabel string, cfg *config.Config, @@ -152,18 +168,30 @@ func buildTargetTUIItems(isProject bool, cwd string) ([]targetTUIItem, error) { if err != nil { return nil, err } + resolvedTargets, err := config.ResolveProjectTargets(cwd, projCfg) + if err != nil { + return nil, err + } + agentBuilder, err := targetsummary.NewProjectBuilder(cwd) + if err != nil { + return nil, err + } for _, entry := range projCfg.Targets { - sc := entry.SkillsConfig() + resolved, ok := resolvedTargets[entry.Name] + if !ok { + continue + } + agentSummary, err := agentBuilder.ProjectTarget(entry) + if err != nil { + return nil, err + } items = append(items, targetTUIItem{ - name: entry.Name, - target: config.TargetConfig{ - Skills: &config.ResourceTargetConfig{ - Path: projectTargetDisplayPath(entry), - Mode: sc.Mode, - Include: sc.Include, - Exclude: sc.Exclude, - }, - }, + name: entry.Name, + target: resolved, + displayPath: projectTargetDisplayPath(entry), + skillSync: buildTargetSkillSyncSummary(resolved.SkillsConfig().Path, filepath.Join(cwd, ".skillshare", "skills"), resolved.SkillsConfig().Mode), + agentConfig: config.ResourceTargetConfig{Mode: agentSummaryMode(agentSummary), Include: agentSummaryInclude(agentSummary), Exclude: agentSummaryExclude(agentSummary)}, + agentSummary: agentSummary, }) } } else { @@ -171,8 +199,23 @@ func buildTargetTUIItems(isProject bool, cwd string) ([]targetTUIItem, error) { if err != nil { return nil, err } + agentBuilder, err := targetsummary.NewGlobalBuilder(cfg) + if err != nil { + return nil, err + } for name, t := range cfg.Targets { - items = append(items, targetTUIItem{name: name, target: t}) + agentSummary, err := agentBuilder.GlobalTarget(name, t) + if err != nil { + return nil, err + } + items = append(items, targetTUIItem{ + name: name, + target: t, + displayPath: t.SkillsConfig().Path, + skillSync: buildTargetSkillSyncSummary(t.SkillsConfig().Path, cfg.Source, t.SkillsConfig().Mode), + agentConfig: config.ResourceTargetConfig{Mode: agentSummaryMode(agentSummary), Include: agentSummaryInclude(agentSummary), Exclude: agentSummaryExclude(agentSummary)}, + agentSummary: agentSummary, + }) } } sort.Slice(items, func(i, j int) bool { @@ -241,6 +284,9 @@ func (m targetListTUIModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) { if m.showModePicker { return m.handleModePickerKey(msg) } + if m.showScopePicker { + return m.handleScopePickerKey(msg) + } if m.showNamingPicker { return m.handleNamingPickerKey(msg) } @@ -303,6 +349,9 @@ func (m targetListTUIModel) handleNormalKey(msg tea.KeyMsg) (tea.Model, tea.Cmd) return m, textinput.Blink case "M": if item, ok := m.list.SelectedItem().(targetTUIItem); ok { + if item.agentSummary != nil { + return m.openScopePicker(item, "mode") + } return m.openModePicker(item.name, item.target) } return m, nil @@ -313,11 +362,17 @@ func (m targetListTUIModel) handleNormalKey(msg tea.KeyMsg) (tea.Model, tea.Cmd) return m, nil case "I": if item, ok := m.list.SelectedItem().(targetTUIItem); ok { + if item.agentSummary != nil { + return m.openScopePicker(item, "include") + } return m.openFilterEdit(item.name, "include", item.target.SkillsConfig().Include) } return m, nil case "E": if item, ok := m.list.SelectedItem().(targetTUIItem); ok { + if item.agentSummary != nil { + return m.openScopePicker(item, "exclude") + } return m.openFilterEdit(item.name, "exclude", item.target.SkillsConfig().Exclude) } return m, nil @@ -412,10 +467,15 @@ func (m targetListTUIModel) handleConfirmKey(msg tea.KeyMsg) (tea.Model, tea.Cmd var targetSyncModes = config.ExtraSyncModes // ["merge", "copy", "symlink"] func (m targetListTUIModel) openModePicker(name string, target config.TargetConfig) (tea.Model, tea.Cmd) { + return m.openModePickerForScope(name, target.SkillsConfig(), "skills") +} + +func (m targetListTUIModel) openModePickerForScope(name string, currentConfig config.ResourceTargetConfig, scope string) (tea.Model, tea.Cmd) { m.showModePicker = true m.modePickerTarget = name + m.modePickerScope = scope m.modeCursor = 0 - current := sync.EffectiveMode(target.SkillsConfig().Mode) + current := sync.EffectiveMode(currentConfig.Mode) for i, mode := range targetSyncModes { if mode == current { m.modeCursor = i @@ -445,15 +505,16 @@ func (m targetListTUIModel) handleModePickerKey(msg tea.KeyMsg) (tea.Model, tea. m.showModePicker = false newMode := targetSyncModes[m.modeCursor] name := m.modePickerTarget + scope := m.modePickerScope return m, func() tea.Msg { - msg, err := m.doSetTargetMode(name, newMode) + msg, err := m.doSetTargetMode(name, scope, newMode) return targetListActionDoneMsg{msg: msg, err: err} } } return m, nil } -func (m targetListTUIModel) doSetTargetMode(name, newMode string) (string, error) { +func (m targetListTUIModel) doSetTargetMode(name, scope, newMode string) (string, error) { if m.projCfg != nil { projCfg, err := config.LoadProject(m.cwd) if err != nil { @@ -461,7 +522,8 @@ func (m targetListTUIModel) doSetTargetMode(name, newMode string) (string, error } for i, entry := range projCfg.Targets { if entry.Name == name { - projCfg.Targets[i].EnsureSkills().Mode = newMode + targetCfg := scopeSetterProject(&projCfg.Targets[i], scope) + targetCfg.Mode = newMode break } } @@ -474,13 +536,14 @@ func (m targetListTUIModel) doSetTargetMode(name, newMode string) (string, error return "", err } t := cfg.Targets[name] - t.EnsureSkills().Mode = newMode + targetCfg := scopeSetterGlobal(&t, scope) + targetCfg.Mode = newMode cfg.Targets[name] = t if err := cfg.Save(); err != nil { return "", err } } - return fmt.Sprintf("✓ Set %s mode to %s", name, newMode), nil + return fmt.Sprintf("✓ Set %s %s mode to %s", name, scope, newMode), nil } // ─── Naming Picker ────────────────────────────────────────────────── @@ -560,9 +623,14 @@ func (m targetListTUIModel) doSetTargetNaming(name, newNaming string) (string, e // ─── Include/Exclude Edit Sub-Panel ────────────────────────────────── func (m targetListTUIModel) openFilterEdit(name, filterType string, patterns []string) (tea.Model, tea.Cmd) { + return m.openFilterEditForScope(name, "skills", filterType, patterns) +} + +func (m targetListTUIModel) openFilterEditForScope(name, scope, filterType string, patterns []string) (tea.Model, tea.Cmd) { m.editingFilter = true m.editFilterType = filterType m.editFilterTarget = name + m.editFilterScope = scope m.editPatterns = make([]string, len(patterns)) copy(m.editPatterns, patterns) m.editCursor = 0 @@ -604,9 +672,10 @@ func (m targetListTUIModel) handleFilterEditKey(msg tea.KeyMsg) (tea.Model, tea. m.editCursor-- } name := m.editFilterTarget + scope := m.editFilterScope filterType := m.editFilterType return m, func() tea.Msg { - msg, err := m.doRemovePattern(name, filterType, pattern) + msg, err := m.doRemovePattern(name, scope, filterType, pattern) return targetListActionDoneMsg{msg: msg, err: err} } } @@ -631,9 +700,10 @@ func (m targetListTUIModel) handleFilterEditAddKey(msg tea.KeyMsg) (tea.Model, t m.editPatterns = append(m.editPatterns, pattern) m.editCursor = len(m.editPatterns) - 1 name := m.editFilterTarget + scope := m.editFilterScope filterType := m.editFilterType return m, func() tea.Msg { - msg, err := m.doAddPattern(name, filterType, pattern) + msg, err := m.doAddPattern(name, scope, filterType, pattern) return targetListActionDoneMsg{msg: msg, err: err} } } @@ -642,7 +712,7 @@ func (m targetListTUIModel) handleFilterEditAddKey(msg tea.KeyMsg) (tea.Model, t return m, cmd } -func (m targetListTUIModel) doAddPattern(name, filterType, pattern string) (string, error) { +func (m targetListTUIModel) doAddPattern(name, scope, filterType, pattern string) (string, error) { if m.projCfg != nil { projCfg, err := config.LoadProject(m.cwd) if err != nil { @@ -650,11 +720,11 @@ func (m targetListTUIModel) doAddPattern(name, filterType, pattern string) (stri } for i, entry := range projCfg.Targets { if entry.Name == name { - sk := projCfg.Targets[i].EnsureSkills() + targetCfg := scopeSetterProject(&projCfg.Targets[i], scope) if filterType == "include" { - sk.Include = append(sk.Include, pattern) + targetCfg.Include = append(targetCfg.Include, pattern) } else { - sk.Exclude = append(sk.Exclude, pattern) + targetCfg.Exclude = append(targetCfg.Exclude, pattern) } break } @@ -668,21 +738,21 @@ func (m targetListTUIModel) doAddPattern(name, filterType, pattern string) (stri return "", err } t := cfg.Targets[name] - sk := t.EnsureSkills() + targetCfg := scopeSetterGlobal(&t, scope) if filterType == "include" { - sk.Include = append(sk.Include, pattern) + targetCfg.Include = append(targetCfg.Include, pattern) } else { - sk.Exclude = append(sk.Exclude, pattern) + targetCfg.Exclude = append(targetCfg.Exclude, pattern) } cfg.Targets[name] = t if err := cfg.Save(); err != nil { return "", err } } - return fmt.Sprintf("✓ Added %s pattern: %s", filterType, pattern), nil + return fmt.Sprintf("✓ Added %s %s pattern: %s", scope, filterType, pattern), nil } -func (m targetListTUIModel) doRemovePattern(name, filterType, pattern string) (string, error) { +func (m targetListTUIModel) doRemovePattern(name, scope, filterType, pattern string) (string, error) { removeFromSlice := func(slice []string, val string) []string { var result []string for _, s := range slice { @@ -700,11 +770,11 @@ func (m targetListTUIModel) doRemovePattern(name, filterType, pattern string) (s } for i, entry := range projCfg.Targets { if entry.Name == name { - sk := projCfg.Targets[i].EnsureSkills() + targetCfg := scopeSetterProject(&projCfg.Targets[i], scope) if filterType == "include" { - sk.Include = removeFromSlice(sk.Include, pattern) + targetCfg.Include = removeFromSlice(targetCfg.Include, pattern) } else { - sk.Exclude = removeFromSlice(sk.Exclude, pattern) + targetCfg.Exclude = removeFromSlice(targetCfg.Exclude, pattern) } break } @@ -718,18 +788,18 @@ func (m targetListTUIModel) doRemovePattern(name, filterType, pattern string) (s return "", err } t := cfg.Targets[name] - sk := t.EnsureSkills() + targetCfg := scopeSetterGlobal(&t, scope) if filterType == "include" { - sk.Include = removeFromSlice(sk.Include, pattern) + targetCfg.Include = removeFromSlice(targetCfg.Include, pattern) } else { - sk.Exclude = removeFromSlice(sk.Exclude, pattern) + targetCfg.Exclude = removeFromSlice(targetCfg.Exclude, pattern) } cfg.Targets[name] = t if err := cfg.Save(); err != nil { return "", err } } - return fmt.Sprintf("✓ Removed %s pattern: %s", filterType, pattern), nil + return fmt.Sprintf("✓ Removed %s %s pattern: %s", scope, filterType, pattern), nil } // ---- View ------------------------------------------------------------------- @@ -747,6 +817,9 @@ func (m targetListTUIModel) View() string { if m.showModePicker { return m.renderModePicker() } + if m.showScopePicker { + return m.renderScopePicker() + } if m.showNamingPicker { return m.renderNamingPicker() } @@ -831,7 +904,7 @@ func renderTargetActionMsg(msg string) string { } func (m targetListTUIModel) renderTargetHelp(scrollInfo string) string { - helpText := "↑↓ navigate / filter Ctrl+d/u scroll M mode N naming I include E exclude R remove q quit" + helpText := "↑↓ navigate / filter Ctrl+d/u scroll M mode(sk/ag) N naming(sk) I include(sk/ag) E exclude(sk/ag) R remove q quit" if m.filtering { helpText = "Enter lock Esc clear q quit" } @@ -885,9 +958,15 @@ func (m targetListTUIModel) renderTargetDetail(item targetTUIItem) string { fmt.Fprintf(&b, "%s\n\n", tc.Title.Render(item.name)) sc := item.target.SkillsConfig() - fmt.Fprintf(&b, "%s %s\n", tc.Dim.Render("Path:"), shortenPath(sc.Path)) + displayPath := item.displayPath + if displayPath == "" { + displayPath = sc.Path + } + fmt.Fprintf(&b, "%s\n", tc.Dim.Render("Skills:")) + fmt.Fprintf(&b, "%s %s\n", tc.Dim.Render("Path:"), shortenPath(displayPath)) fmt.Fprintf(&b, "%s %s\n", tc.Dim.Render("Mode:"), sync.EffectiveMode(sc.Mode)) fmt.Fprintf(&b, "%s %s\n", tc.Dim.Render("Naming:"), config.EffectiveTargetNaming(sc.TargetNaming)) + fmt.Fprintf(&b, "%s %s\n", tc.Dim.Render("Sync:"), item.skillSync) if len(sc.Include) > 0 { fmt.Fprintf(&b, "\n%s\n", tc.Dim.Render("Include:")) @@ -906,9 +985,52 @@ func (m targetListTUIModel) renderTargetDetail(item targetTUIItem) string { fmt.Fprintf(&b, "\n%s\n", tc.Dim.Render("No include/exclude filters")) } + if item.agentSummary != nil { + agentPath := item.agentSummary.DisplayPath + if agentPath == "" { + agentPath = item.agentSummary.Path + } + + fmt.Fprintf(&b, "\n%s\n", tc.Dim.Render("Agents:")) + fmt.Fprintf(&b, "%s %s\n", tc.Dim.Render("Path:"), shortenPath(agentPath)) + fmt.Fprintf(&b, "%s %s\n", tc.Dim.Render("Mode:"), item.agentSummary.Mode) + fmt.Fprintf(&b, "%s %s\n", tc.Dim.Render("Sync:"), formatTargetAgentSyncSummary(item.agentSummary)) + + if item.agentSummary.Mode == "symlink" { + fmt.Fprintf(&b, "\n%s\n", tc.Dim.Render("Agent include/exclude filters ignored in symlink mode")) + } else if len(item.agentSummary.Include) > 0 { + fmt.Fprintf(&b, "\n%s\n", tc.Dim.Render("Agent Include:")) + for _, p := range item.agentSummary.Include { + fmt.Fprintf(&b, " %s\n", p) + } + } + if item.agentSummary.Mode != "symlink" && len(item.agentSummary.Exclude) > 0 { + fmt.Fprintf(&b, "\n%s\n", tc.Dim.Render("Agent Exclude:")) + for _, p := range item.agentSummary.Exclude { + fmt.Fprintf(&b, " %s\n", p) + } + } + if item.agentSummary.Mode != "symlink" && len(item.agentSummary.Include) == 0 && len(item.agentSummary.Exclude) == 0 { + fmt.Fprintf(&b, "\n%s\n", tc.Dim.Render("No agent include/exclude filters")) + } + } + return b.String() } +func buildTargetSkillSyncSummary(targetPath, sourcePath, mode string) string { + switch sync.EffectiveMode(mode) { + case "copy": + status, managed, local := sync.CheckStatusCopy(targetPath) + return fmt.Sprintf("%s (%d managed, %d local)", status, managed, local) + case "merge": + status, linked, local := sync.CheckStatusMerge(targetPath, sourcePath) + return fmt.Sprintf("%s (%d shared, %d local)", status, linked, local) + default: + return sync.CheckStatus(targetPath, sourcePath).String() + } +} + // ---- Overlay renders -------------------------------------------------------- func (m targetListTUIModel) renderConfirmOverlay() string { @@ -924,7 +1046,7 @@ func (m targetListTUIModel) renderConfirmOverlay() string { func (m targetListTUIModel) renderModePicker() string { var b strings.Builder - fmt.Fprintf(&b, "\n%s\n", tc.Title.Render("Change mode")) + fmt.Fprintf(&b, "\n%s\n", tc.Title.Render("Change "+m.modePickerScope+" mode")) fmt.Fprintf(&b, "%s %s\n\n", tc.Dim.Render("Target:"), m.modePickerTarget) for i, mode := range targetSyncModes { @@ -952,6 +1074,39 @@ func (m targetListTUIModel) renderModePicker() string { return b.String() } +func (m targetListTUIModel) renderScopePicker() string { + var b strings.Builder + item, ok := m.list.SelectedItem().(targetTUIItem) + if !ok { + return "" + } + options := targetScopeOptions(item, m.scopePickerAction) + + fmt.Fprintf(&b, "\n%s\n", tc.Title.Render("Choose resource")) + fmt.Fprintf(&b, "%s %s\n", tc.Dim.Render("Target:"), m.scopePickerTarget) + fmt.Fprintf(&b, "%s %s\n\n", tc.Dim.Render("Action:"), m.scopePickerAction) + + for i, option := range options { + cursor := " " + if i == m.scopePickerCursor { + cursor = tc.Cyan.Render(">") + " " + } + label := capitalize(option.scope) + if option.enabled { + if i == m.scopePickerCursor { + fmt.Fprintf(&b, "%s%s\n", cursor, tc.Cyan.Render(label)) + } else { + fmt.Fprintf(&b, "%s%s\n", cursor, label) + } + continue + } + fmt.Fprintf(&b, "%s%s%s\n", cursor, tc.Dim.Render(label), tc.Dim.Render(" ("+option.disabled+")")) + } + + fmt.Fprintf(&b, "\n%s\n", tc.Help.Render("↑↓ select Enter confirm Esc cancel")) + return b.String() +} + func (m targetListTUIModel) renderNamingPicker() string { var b strings.Builder @@ -985,7 +1140,7 @@ func (m targetListTUIModel) renderFilterEditPanel() string { var b strings.Builder title := capitalize(m.editFilterType) - fmt.Fprintf(&b, "%s %s\n", tc.Title.Render(title+" patterns"), tc.Dim.Render("("+m.editFilterTarget+")")) + fmt.Fprintf(&b, "%s %s\n", tc.Title.Render(title+" "+m.editFilterScope+" patterns"), tc.Dim.Render("("+m.editFilterTarget+")")) fmt.Fprintln(&b) if len(m.editPatterns) == 0 { @@ -1067,3 +1222,136 @@ func runTargetListTUI(mode runMode, cwd string) (string, string, error) { } return m.action, m.confirmTarget, nil } + +func (m targetListTUIModel) openScopePicker(item targetTUIItem, action string) (tea.Model, tea.Cmd) { + m.showScopePicker = true + m.scopePickerTarget = item.name + m.scopePickerAction = action + m.scopePickerCursor = firstEnabledScopeOption(targetScopeOptions(item, action)) + m.lastActionMsg = "" + return m, nil +} + +func (m targetListTUIModel) handleScopePickerKey(msg tea.KeyMsg) (tea.Model, tea.Cmd) { + item, ok := m.list.SelectedItem().(targetTUIItem) + if !ok { + m.showScopePicker = false + return m, nil + } + options := targetScopeOptions(item, m.scopePickerAction) + + switch msg.String() { + case "q", "esc": + m.showScopePicker = false + return m, nil + case "up", "k": + m.scopePickerCursor = moveScopePickerCursor(options, m.scopePickerCursor, -1) + return m, nil + case "down", "j": + m.scopePickerCursor = moveScopePickerCursor(options, m.scopePickerCursor, 1) + return m, nil + case "enter": + if len(options) == 0 || !options[m.scopePickerCursor].enabled { + m.showScopePicker = false + m.lastActionMsg = "✗ Agents include/exclude filters are ignored in symlink mode" + return m, nil + } + m.showScopePicker = false + scope := options[m.scopePickerCursor].scope + switch m.scopePickerAction { + case "mode": + return m.openModePickerForScope(item.name, itemConfigForScope(item, scope), scope) + case "include": + return m.openFilterEditForScope(item.name, scope, "include", itemConfigForScope(item, scope).Include) + case "exclude": + return m.openFilterEditForScope(item.name, scope, "exclude", itemConfigForScope(item, scope).Exclude) + } + return m, nil + } + return m, nil +} + +func targetScopeOptions(item targetTUIItem, action string) []targetScopeOption { + options := []targetScopeOption{{scope: "skills", enabled: true}} + if item.agentSummary == nil { + return options + } + + option := targetScopeOption{scope: "agents", enabled: true} + if (action == "include" || action == "exclude") && item.agentSummary.Mode == "symlink" { + option.enabled = false + option.disabled = "ignored in symlink mode" + } + return append(options, option) +} + +func firstEnabledScopeOption(options []targetScopeOption) int { + for i, option := range options { + if option.enabled { + return i + } + } + return 0 +} + +func moveScopePickerCursor(options []targetScopeOption, current, delta int) int { + if len(options) == 0 { + return 0 + } + if current < 0 || current >= len(options) { + current = firstEnabledScopeOption(options) + } + next := current + for { + candidate := next + delta + if candidate < 0 || candidate >= len(options) { + return current + } + next = candidate + if options[next].enabled { + return next + } + } +} + +func itemConfigForScope(item targetTUIItem, scope string) config.ResourceTargetConfig { + if scope == "agents" { + return item.agentConfig + } + return item.target.SkillsConfig() +} + +func scopeSetterGlobal(target *config.TargetConfig, scope string) *config.ResourceTargetConfig { + if scope == "agents" { + return target.EnsureAgents() + } + return target.EnsureSkills() +} + +func scopeSetterProject(target *config.ProjectTargetEntry, scope string) *config.ResourceTargetConfig { + if scope == "agents" { + return target.EnsureAgents() + } + return target.EnsureSkills() +} + +func agentSummaryMode(summary *targetsummary.AgentSummary) string { + if summary == nil { + return "" + } + return summary.Mode +} + +func agentSummaryInclude(summary *targetsummary.AgentSummary) []string { + if summary == nil { + return nil + } + return append([]string(nil), summary.Include...) +} + +func agentSummaryExclude(summary *targetsummary.AgentSummary) []string { + if summary == nil { + return nil + } + return append([]string(nil), summary.Exclude...) +} diff --git a/cmd/skillshare/target_list_tui_item.go b/cmd/skillshare/target_list_tui_item.go index ed45acb5..6aa4c615 100644 --- a/cmd/skillshare/target_list_tui_item.go +++ b/cmd/skillshare/target_list_tui_item.go @@ -6,6 +6,7 @@ import ( "skillshare/internal/config" "skillshare/internal/sync" + "skillshare/internal/targetsummary" "github.com/charmbracelet/bubbles/list" tea "github.com/charmbracelet/bubbletea" @@ -13,8 +14,12 @@ import ( // targetTUIItem wraps a target entry for the bubbles/list widget. type targetTUIItem struct { - name string - target config.TargetConfig + name string + target config.TargetConfig + displayPath string + skillSync string + agentConfig config.ResourceTargetConfig + agentSummary *targetsummary.AgentSummary } func (i targetTUIItem) FilterValue() string { return i.name } diff --git a/cmd/skillshare/target_project.go b/cmd/skillshare/target_project.go index f5e7761f..5c8c5429 100644 --- a/cmd/skillshare/target_project.go +++ b/cmd/skillshare/target_project.go @@ -11,6 +11,7 @@ import ( "skillshare/internal/config" "skillshare/internal/oplog" "skillshare/internal/sync" + "skillshare/internal/targetsummary" "skillshare/internal/ui" "skillshare/internal/utils" "skillshare/internal/validate" @@ -277,16 +278,27 @@ func targetListProjectWithJSON(root string, jsonOutput bool) error { }) if jsonOutput { + agentBuilder, err := targetsummary.NewProjectBuilder(root) + if err != nil { + return err + } + var items []targetListJSONItem for _, entry := range targets { sc := entry.SkillsConfig() - items = append(items, targetListJSONItem{ + item := targetListJSONItem{ Name: entry.Name, Path: projectTargetDisplayPath(entry), Mode: getTargetMode(sc.Mode, ""), Include: sc.Include, Exclude: sc.Exclude, - }) + } + agentSummary, err := agentBuilder.ProjectTarget(entry) + if err != nil { + return err + } + applyTargetListAgentSummary(&item, agentSummary) + items = append(items, item) } output := struct { Targets []targetListJSONItem `json:"targets"` @@ -294,17 +306,14 @@ func targetListProjectWithJSON(root string, jsonOutput bool) error { return writeJSON(&output) } - ui.Header("Configured Targets (project)") - for _, entry := range targets { - displayPath := projectTargetDisplayPath(entry) - sc := entry.SkillsConfig() - mode := sc.Mode - if mode == "" { - mode = "merge" - } - fmt.Printf(" %-12s %s (%s)\n", entry.Name, displayPath, mode) + items, err := buildTargetTUIItems(true, root) + if err != nil { + return err } + ui.Header("Configured Targets (project)") + printTargetListPlain(items) + return nil } @@ -320,23 +329,9 @@ func targetInfoProject(name string, args []string, root string) error { if err != nil { return err } - - var newMode, newNaming string - for i := 0; i < len(remaining); i++ { - switch remaining[i] { - case "--mode", "-m": - if i+1 >= len(remaining) { - return fmt.Errorf("--mode requires a value (merge, symlink, or copy)") - } - newMode = remaining[i+1] - i++ - case "--target-naming": - if i+1 >= len(remaining) { - return fmt.Errorf("--target-naming requires a value (flat or standard)") - } - newNaming = remaining[i+1] - i++ - } + settings, err := parseTargetSettingFlags(remaining) + if err != nil { + return err } cfg, err := config.LoadProject(root) @@ -360,13 +355,55 @@ func targetInfoProject(name string, args []string, root string) error { if filterOpts.hasUpdates() { start := time.Now() entry := &cfg.Targets[targetIdx] - s := entry.EnsureSkills() - changes, fErr := applyFilterUpdates(&s.Include, &s.Exclude, filterOpts) - if fErr != nil { - return fErr + var changes []string + mutated := false + + if filterOpts.Skills.hasUpdates() { + s := entry.EnsureSkills() + skillChanges, fErr := applyFilterUpdates(&s.Include, &s.Exclude, filterOpts.Skills) + if fErr != nil { + return fErr + } + changes = append(changes, skillChanges...) + mutated = true } - if err := cfg.Save(root); err != nil { - return err + + if filterOpts.Agents.hasUpdates() { + agentBuilder, buildErr := targetsummary.NewProjectBuilder(root) + if buildErr != nil { + return buildErr + } + agentSummary, buildErr := agentBuilder.ProjectTarget(*entry) + if buildErr != nil { + return buildErr + } + if agentSummary == nil { + return fmt.Errorf("target '%s' does not have an agents path", name) + } + if agentSummary.Mode == "symlink" { + return fmt.Errorf("target '%s' agent include/exclude filters are ignored in symlink mode; use --agent-mode merge or --agent-mode copy first", name) + } + + ac := entry.AgentsConfig() + include := append([]string(nil), ac.Include...) + exclude := append([]string(nil), ac.Exclude...) + agentChanges, fErr := applyFilterUpdates(&include, &exclude, filterOpts.Agents) + if fErr != nil { + return fErr + } + if len(agentChanges) > 0 { + a := entry.EnsureAgents() + a.Include = include + a.Exclude = exclude + mutated = true + } + changes = append(changes, scopeFilterChanges("agents", agentChanges)...) + } + + if mutated { + if err := cfg.Save(root); err != nil { + return err + } } for _, change := range changes { ui.Success("%s: %s", name, change) @@ -385,12 +422,16 @@ func targetInfoProject(name string, args []string, root string) error { return nil } - if newMode != "" { - return updateTargetModeProject(cfg, targetIdx, newMode, root) + if settings.SkillMode != "" { + return updateTargetModeProject(cfg, targetIdx, settings.SkillMode, root) + } + + if settings.AgentMode != "" { + return updateTargetAgentModeProject(cfg, targetIdx, settings.AgentMode, root) } - if newNaming != "" { - return updateTargetNamingProject(cfg, targetIdx, newNaming, root) + if settings.Naming != "" { + return updateTargetNamingProject(cfg, targetIdx, settings.Naming, root) } targets, err := config.ResolveProjectTargets(root, cfg) @@ -405,6 +446,14 @@ func targetInfoProject(name string, args []string, root string) error { targetEntry := cfg.Targets[targetIdx] sourcePath := filepath.Join(root, ".skillshare", "skills") + agentBuilder, err := targetsummary.NewProjectBuilder(root) + if err != nil { + return err + } + agentSummary, err := agentBuilder.ProjectTarget(targetEntry) + if err != nil { + return err + } sc := targetEntry.SkillsConfig() mode := sc.Mode @@ -439,6 +488,7 @@ func targetInfoProject(name string, args []string, root string) error { fmt.Printf(" Include: %s\n", formatFilterList(sc.Include)) fmt.Printf(" Exclude: %s\n", formatFilterList(sc.Exclude)) + printTargetAgentSection(agentSummary) return nil } @@ -464,6 +514,38 @@ func updateTargetModeProject(cfg *config.ProjectConfig, idx int, newMode string, return nil } +func updateTargetAgentModeProject(cfg *config.ProjectConfig, idx int, newMode string, root string) error { + if newMode != "merge" && newMode != "symlink" && newMode != "copy" { + return fmt.Errorf("invalid agent mode '%s'. Use 'merge', 'symlink', or 'copy'", newMode) + } + + entry := &cfg.Targets[idx] + agentBuilder, err := targetsummary.NewProjectBuilder(root) + if err != nil { + return err + } + agentSummary, err := agentBuilder.ProjectTarget(*entry) + if err != nil { + return err + } + if agentSummary == nil { + return fmt.Errorf("target '%s' does not have an agents path", entry.Name) + } + + oldMode := agentSummary.Mode + entry.EnsureAgents().Mode = newMode + if err := cfg.Save(root); err != nil { + return err + } + + ui.Success("Changed %s agent mode: %s -> %s", entry.Name, oldMode, newMode) + if newMode == "symlink" && (len(agentSummary.Include) > 0 || len(agentSummary.Exclude) > 0) { + ui.Warning("Agent include/exclude filters are ignored in symlink mode") + } + ui.Info("Run 'skillshare sync' to apply the new mode") + return nil +} + func updateTargetNamingProject(cfg *config.ProjectConfig, idx int, newNaming string, root string) error { if !config.IsValidTargetNaming(newNaming) { return fmt.Errorf("invalid target naming '%s'. Use 'flat' or 'standard'", newNaming) diff --git a/internal/config/config.go b/internal/config/config.go index af302e5d..6d36b9f5 100644 --- a/internal/config/config.go +++ b/internal/config/config.go @@ -132,6 +132,15 @@ func (tc *TargetConfig) EnsureSkills() *ResourceTargetConfig { return tc.Skills } +// EnsureAgents returns the Agents sub-key, creating it if nil. +// Use this before writing to Agents fields. +func (tc *TargetConfig) EnsureAgents() *ResourceTargetConfig { + if tc.Agents == nil { + tc.Agents = &ResourceTargetConfig{} + } + return tc.Agents +} + // migrateTargetConfigs moves legacy flat fields into skills: sub-key. // Returns true if any target was migrated. func migrateTargetConfigs(targets map[string]TargetConfig) bool { diff --git a/internal/config/project.go b/internal/config/project.go index 715c34c3..4e585862 100644 --- a/internal/config/project.go +++ b/internal/config/project.go @@ -182,6 +182,14 @@ func (t *ProjectTargetEntry) EnsureSkills() *ResourceTargetConfig { return t.Skills } +// EnsureAgents returns the Agents sub-key, creating it if nil. +func (t *ProjectTargetEntry) EnsureAgents() *ResourceTargetConfig { + if t.Agents == nil { + t.Agents = &ResourceTargetConfig{} + } + return t.Agents +} + // ResourceEntry represents a remote resource entry in config (shared by global and project). // Used for both skills and agents. type ResourceEntry struct { @@ -426,7 +434,7 @@ func ResolveProjectTargets(projectRoot string, cfg *ProjectConfig) (map[string]T absPath = filepath.Join(projectRoot, filepath.FromSlash(targetPath)) } - resolved[name] = TargetConfig{ + tc := TargetConfig{ defaultTargetNaming: cfg.TargetNaming, Skills: &ResourceTargetConfig{ Path: absPath, @@ -436,6 +444,32 @@ func ResolveProjectTargets(projectRoot string, cfg *ProjectConfig) (map[string]T Exclude: append([]string(nil), sc.Exclude...), }, } + + // Resolve Agents sub-key: from entry config or builtin defaults. + ac := entry.AgentsConfig() + agentPath := strings.TrimSpace(ac.Path) + if agentPath == "" { + if builtin, ok := ProjectAgentTargets()[name]; ok { + agentPath = builtin.Path + } + } + if agentPath != "" { + absAgentPath := agentPath + if utils.HasTildePrefix(absAgentPath) { + absAgentPath = expandPath(absAgentPath) + } + if !filepath.IsAbs(agentPath) { + absAgentPath = filepath.Join(projectRoot, filepath.FromSlash(agentPath)) + } + tc.Agents = &ResourceTargetConfig{ + Path: absAgentPath, + Mode: ac.Mode, + Include: append([]string(nil), ac.Include...), + Exclude: append([]string(nil), ac.Exclude...), + } + } + + resolved[name] = tc } return resolved, nil diff --git a/internal/server/handler_helpers_test.go b/internal/server/handler_helpers_test.go index a7c04a89..5b01b3b3 100644 --- a/internal/server/handler_helpers_test.go +++ b/internal/server/handler_helpers_test.go @@ -3,6 +3,7 @@ package server import ( "os" "path/filepath" + "strings" "testing" "skillshare/internal/config" @@ -95,3 +96,28 @@ func addSkillMeta(t *testing.T, sourceDir, name, source string) { t.Fatalf("addSkillMeta: %v", err) } } + +func addAgent(t *testing.T, agentsDir, relPath string) { + t.Helper() + agentPath := filepath.Join(agentsDir, filepath.FromSlash(relPath)) + if err := os.MkdirAll(filepath.Dir(agentPath), 0o755); err != nil { + t.Fatalf("create agent dir: %v", err) + } + if err := os.WriteFile(agentPath, []byte("---\nname: "+strings.TrimSuffix(filepath.Base(relPath), ".md")+"\n---\n# agent"), 0o644); err != nil { + t.Fatalf("write agent: %v", err) + } +} + +func addAgentMeta(t *testing.T, agentsDir, relPath, source string) { + t.Helper() + store := install.LoadMetadataOrNew(agentsDir) + key := strings.TrimSuffix(filepath.ToSlash(relPath), ".md") + store.Set(key, &install.MetadataEntry{ + Source: source, + Kind: install.MetadataKindAgent, + Subdir: filepath.ToSlash(relPath), + }) + if err := store.Save(agentsDir); err != nil { + t.Fatalf("addAgentMeta: %v", err) + } +} diff --git a/internal/server/handler_overview.go b/internal/server/handler_overview.go index 0a7da3ad..2cab0557 100644 --- a/internal/server/handler_overview.go +++ b/internal/server/handler_overview.go @@ -8,6 +8,7 @@ import ( "skillshare/internal/git" "skillshare/internal/install" + "skillshare/internal/resource" "skillshare/internal/sync" "skillshare/internal/utils" versioncheck "skillshare/internal/version" @@ -24,6 +25,10 @@ func (s *Server) handleOverview(w http.ResponseWriter, r *http.Request) { s.mu.RLock() source := s.cfg.Source agentsSource := s.agentsSource() + extrasSource := s.cfg.ExtrasSource + if s.IsProjectMode() { + extrasSource = filepath.Join(s.projectRoot, ".skillshare", "extras") + } cfgMode := s.cfg.Mode targetCount := len(s.cfg.Targets) projectRoot := s.projectRoot @@ -58,11 +63,8 @@ func (s *Server) handleOverview(w http.ResponseWriter, r *http.Request) { // Count agents agentCount := 0 if agentsSource != "" { - agentEntries, _ := os.ReadDir(agentsSource) - for _, e := range agentEntries { - if !e.IsDir() && strings.HasSuffix(strings.ToLower(e.Name()), ".md") { - agentCount++ - } + if agents, discoverErr := (resource.AgentKind{}).Discover(agentsSource); discoverErr == nil { + agentCount = len(agents) } } @@ -77,6 +79,12 @@ func (s *Server) handleOverview(w http.ResponseWriter, r *http.Request) { "trackedRepos": trackedRepos, "isProjectMode": isProjectMode, } + if agentsSource != "" { + resp["agentsSource"] = agentsSource + } + if extrasSource != "" { + resp["extrasSource"] = extrasSource + } if isProjectMode { resp["projectRoot"] = projectRoot } diff --git a/internal/server/handler_overview_test.go b/internal/server/handler_overview_test.go index e744d73c..698b0eb2 100644 --- a/internal/server/handler_overview_test.go +++ b/internal/server/handler_overview_test.go @@ -4,6 +4,8 @@ import ( "encoding/json" "net/http" "net/http/httptest" + "os" + "path/filepath" "testing" ) @@ -50,6 +52,34 @@ func TestHandleOverview_WithSkills(t *testing.T) { } } +func TestHandleOverview_AgentCountIncludesNestedAgents(t *testing.T) { + s, _ := newTestServer(t) + agentsDir := s.agentsSource() + if err := os.MkdirAll(filepath.Join(agentsDir, "demo"), 0755); err != nil { + t.Fatalf("mkdir agents dir: %v", err) + } + if err := os.WriteFile(filepath.Join(agentsDir, "top-level.md"), []byte("# Top"), 0644); err != nil { + t.Fatalf("write top-level agent: %v", err) + } + if err := os.WriteFile(filepath.Join(agentsDir, "demo", "nested-agent.md"), []byte("# Nested"), 0644); err != nil { + t.Fatalf("write nested agent: %v", err) + } + + req := httptest.NewRequest(http.MethodGet, "/api/overview", nil) + rr := httptest.NewRecorder() + s.handler.ServeHTTP(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("expected 200, got %d: %s", rr.Code, rr.Body.String()) + } + + var resp map[string]any + json.Unmarshal(rr.Body.Bytes(), &resp) + if resp["agentCount"].(float64) != 2 { + t.Errorf("expected 2 agents including nested entries, got %v", resp["agentCount"]) + } +} + func TestHandleOverview_ProjectMode(t *testing.T) { tmp := t.TempDir() s, _ := newTestServer(t) diff --git a/internal/server/handler_resources_agents_test.go b/internal/server/handler_resources_agents_test.go new file mode 100644 index 00000000..50b7cacb --- /dev/null +++ b/internal/server/handler_resources_agents_test.go @@ -0,0 +1,229 @@ +package server + +import ( + "encoding/json" + "net/http" + "net/http/httptest" + "os" + "os/exec" + "path/filepath" + "strings" + "testing" + + "skillshare/internal/install" + "skillshare/internal/trash" +) + +func TestHandleGetSkill_AgentKind(t *testing.T) { + s, _ := newTestServer(t) + agentsDir := s.agentsSource() + if err := os.MkdirAll(agentsDir, 0o755); err != nil { + t.Fatalf("create agents dir: %v", err) + } + addAgent(t, agentsDir, "demo/reviewer.md") + if err := os.WriteFile(filepath.Join(agentsDir, ".agentignore"), []byte("demo/reviewer.md\n"), 0o644); err != nil { + t.Fatalf("write .agentignore: %v", err) + } + + req := httptest.NewRequest(http.MethodGet, "/api/resources/demo__reviewer.md?kind=agent", nil) + req.SetPathValue("name", "demo__reviewer.md") + rr := httptest.NewRecorder() + s.handleGetSkill(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("expected 200, got %d: %s", rr.Code, rr.Body.String()) + } + + var resp struct { + Resource struct { + Kind string `json:"kind"` + RelPath string `json:"relPath"` + Disabled bool `json:"disabled"` + } `json:"resource"` + } + if err := json.Unmarshal(rr.Body.Bytes(), &resp); err != nil { + t.Fatalf("decode response: %v", err) + } + if resp.Resource.Kind != "agent" { + t.Fatalf("expected kind=agent, got %q", resp.Resource.Kind) + } + if resp.Resource.RelPath != "demo/reviewer.md" { + t.Fatalf("expected relPath demo/reviewer.md, got %q", resp.Resource.RelPath) + } + if !resp.Resource.Disabled { + t.Fatal("expected agent detail to report disabled=true") + } +} + +func TestHandleListSkills_AgentDisabled(t *testing.T) { + s, _ := newTestServer(t) + agentsDir := s.agentsSource() + if err := os.MkdirAll(agentsDir, 0o755); err != nil { + t.Fatalf("create agents dir: %v", err) + } + addAgent(t, agentsDir, "demo/reviewer.md") + if err := os.WriteFile(filepath.Join(agentsDir, ".agentignore"), []byte("demo/reviewer.md\n"), 0o644); err != nil { + t.Fatalf("write .agentignore: %v", err) + } + + req := httptest.NewRequest(http.MethodGet, "/api/resources?kind=agent", nil) + rr := httptest.NewRecorder() + s.handleListSkills(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("expected 200, got %d: %s", rr.Code, rr.Body.String()) + } + + var resp struct { + Resources []struct { + FlatName string `json:"flatName"` + Disabled bool `json:"disabled"` + } `json:"resources"` + } + if err := json.Unmarshal(rr.Body.Bytes(), &resp); err != nil { + t.Fatalf("decode response: %v", err) + } + if len(resp.Resources) != 1 { + t.Fatalf("expected 1 agent, got %d", len(resp.Resources)) + } + if resp.Resources[0].FlatName != "demo__reviewer.md" { + t.Fatalf("expected demo__reviewer.md, got %q", resp.Resources[0].FlatName) + } + if !resp.Resources[0].Disabled { + t.Fatal("expected disabled=true in agent list response") + } +} + +func TestHandleToggleSkill_AgentKind(t *testing.T) { + s, _ := newTestServer(t) + agentsDir := s.agentsSource() + if err := os.MkdirAll(agentsDir, 0o755); err != nil { + t.Fatalf("create agents dir: %v", err) + } + addAgent(t, agentsDir, "demo/reviewer.md") + + disableReq := httptest.NewRequest(http.MethodPost, "/api/resources/demo__reviewer.md/disable?kind=agent", nil) + disableReq.SetPathValue("name", "demo__reviewer.md") + disableRR := httptest.NewRecorder() + s.handleDisableSkill(disableRR, disableReq) + + if disableRR.Code != http.StatusOK { + t.Fatalf("disable expected 200, got %d: %s", disableRR.Code, disableRR.Body.String()) + } + + data, err := os.ReadFile(filepath.Join(agentsDir, ".agentignore")) + if err != nil { + t.Fatalf("read .agentignore: %v", err) + } + if !strings.Contains(string(data), "demo/reviewer.md") { + t.Fatalf("expected .agentignore to contain demo/reviewer.md, got %q", string(data)) + } + + enableReq := httptest.NewRequest(http.MethodPost, "/api/resources/demo__reviewer.md/enable?kind=agent", nil) + enableReq.SetPathValue("name", "demo__reviewer.md") + enableRR := httptest.NewRecorder() + s.handleEnableSkill(enableRR, enableReq) + + if enableRR.Code != http.StatusOK { + t.Fatalf("enable expected 200, got %d: %s", enableRR.Code, enableRR.Body.String()) + } + + data, err = os.ReadFile(filepath.Join(agentsDir, ".agentignore")) + if err != nil { + t.Fatalf("read .agentignore after enable: %v", err) + } + if strings.Contains(string(data), "demo/reviewer.md") { + t.Fatalf("expected demo/reviewer.md to be removed from .agentignore, got %q", string(data)) + } +} + +func TestHandleUninstallSkill_AgentKind(t *testing.T) { + s, _ := newTestServer(t) + agentsDir := s.agentsSource() + if err := os.MkdirAll(agentsDir, 0o755); err != nil { + t.Fatalf("create agents dir: %v", err) + } + addAgent(t, agentsDir, "demo/reviewer.md") + + store := install.LoadMetadataOrNew(agentsDir) + store.Set("demo/reviewer", &install.MetadataEntry{ + Source: "file:///tmp/reviewer", + Kind: install.MetadataKindAgent, + Subdir: "demo/reviewer.md", + }) + if err := store.Save(agentsDir); err != nil { + t.Fatalf("save metadata: %v", err) + } + s.agentsStore = store + + req := httptest.NewRequest(http.MethodDelete, "/api/resources/demo__reviewer.md?kind=agent", nil) + req.SetPathValue("name", "demo__reviewer.md") + rr := httptest.NewRecorder() + s.handleUninstallSkill(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("expected 200, got %d: %s", rr.Code, rr.Body.String()) + } + + if _, err := os.Stat(filepath.Join(agentsDir, "demo", "reviewer.md")); !os.IsNotExist(err) { + t.Fatalf("expected agent file removed, stat err=%v", err) + } + if entry := trash.FindByName(s.agentTrashBase(), "demo/reviewer"); entry == nil { + t.Fatal("expected agent to be moved to agent trash") + } + if got := s.agentsStore.GetByPath("demo/reviewer"); got != nil { + t.Fatalf("expected agent metadata removed, got %+v", got) + } +} + +func TestUpdateSingleByKind_Agent(t *testing.T) { + s, _ := newTestServer(t) + agentsDir := s.agentsSource() + if err := os.MkdirAll(agentsDir, 0o755); err != nil { + t.Fatalf("create agents dir: %v", err) + } + addAgent(t, agentsDir, "demo/reviewer.md") + + repoDir := t.TempDir() + initGitRepo(t, repoDir) + addAgent(t, repoDir, "demo/reviewer.md") + if err := os.WriteFile(filepath.Join(repoDir, "demo", "reviewer.md"), []byte("# updated agent\n"), 0o644); err != nil { + t.Fatalf("write repo agent: %v", err) + } + for _, args := range [][]string{{"add", "."}, {"commit", "-m", "add agent"}} { + cmd := exec.Command("git", args...) + cmd.Dir = repoDir + if out, err := cmd.CombinedOutput(); err != nil { + t.Fatalf("git %v failed: %s %v", args, out, err) + } + } + + source := "file://" + filepath.ToSlash(repoDir) + "//demo/reviewer.md" + store := install.LoadMetadataOrNew(agentsDir) + store.Set("demo/reviewer", &install.MetadataEntry{ + Source: source, + Kind: install.MetadataKindAgent, + Subdir: "demo/reviewer.md", + Version: "stale-version", + }) + if err := store.Save(agentsDir); err != nil { + t.Fatalf("save metadata: %v", err) + } + s.agentsStore = store + + result := s.updateSingleByKind("demo__reviewer.md", "agent", false, true) + if result.Action != "updated" { + t.Fatalf("expected updated, got %+v", result) + } + if result.Kind != "agent" { + t.Fatalf("expected kind=agent, got %+v", result) + } + + data, err := os.ReadFile(filepath.Join(agentsDir, "demo", "reviewer.md")) + if err != nil { + t.Fatalf("read updated agent: %v", err) + } + if string(data) != "# updated agent\n" { + t.Fatalf("expected updated agent content, got %q", string(data)) + } +} diff --git a/internal/server/handler_skills.go b/internal/server/handler_skills.go index 3d3239ac..bf1e205a 100644 --- a/internal/server/handler_skills.go +++ b/internal/server/handler_skills.go @@ -101,6 +101,7 @@ func (s *Server) handleListSkills(w http.ResponseWriter, r *http.Request) { FlatName: d.FlatName, RelPath: d.RelPath, SourcePath: d.SourcePath, + Disabled: d.Disabled, } // Read from centralized agents metadata store @@ -130,81 +131,88 @@ func (s *Server) handleGetSkill(w http.ResponseWriter, r *http.Request) { s.mu.RUnlock() name := r.PathValue("name") - - // Find the skill by flat name or base name - discovered, err := sync.DiscoverSourceSkillsAll(source) - if err != nil { - writeError(w, http.StatusInternalServerError, err.Error()) + kind := r.URL.Query().Get("kind") + if kind != "" && kind != "skill" && kind != "agent" { + writeError(w, http.StatusBadRequest, "invalid kind: "+kind) return } - for _, d := range discovered { - baseName := filepath.Base(d.SourcePath) - if d.FlatName != name && baseName != name { - continue - } - - item := skillItem{ - Name: baseName, - Kind: "skill", - FlatName: d.FlatName, - RelPath: d.RelPath, - SourcePath: d.SourcePath, - IsInRepo: d.IsInRepo, - Targets: d.Targets, - Disabled: d.Disabled, + // Find the skill by flat name or base name + if kind != "agent" { + discovered, err := sync.DiscoverSourceSkillsAll(source) + if err != nil { + writeError(w, http.StatusInternalServerError, err.Error()) + return } - if entry := s.skillsStore.GetByPath(d.RelPath); entry != nil { - if !entry.InstalledAt.IsZero() { - item.InstalledAt = entry.InstalledAt.Format(time.RFC3339) + for _, d := range discovered { + baseName := filepath.Base(d.SourcePath) + if d.FlatName != name && baseName != name { + continue } - item.Source = entry.Source - item.Type = entry.Type - item.RepoURL = entry.RepoURL - item.Version = entry.Version - item.Branch = entry.Branch - } - enrichSkillBranch(&item) - - // Read SKILL.md content - skillMdContent := "" - skillMdPath := filepath.Join(d.SourcePath, "SKILL.md") - if data, err := os.ReadFile(skillMdPath); err == nil { - skillMdContent = string(data) - } - // List all files in the skill directory - files := make([]string, 0) - filepath.Walk(d.SourcePath, func(path string, info os.FileInfo, err error) error { - if err != nil { - return nil + item := skillItem{ + Name: baseName, + Kind: "skill", + FlatName: d.FlatName, + RelPath: d.RelPath, + SourcePath: d.SourcePath, + IsInRepo: d.IsInRepo, + Targets: d.Targets, + Disabled: d.Disabled, } - if info.IsDir() && utils.IsHidden(info.Name()) { - return filepath.SkipDir + + if entry := s.skillsStore.GetByPath(d.RelPath); entry != nil { + if !entry.InstalledAt.IsZero() { + item.InstalledAt = entry.InstalledAt.Format(time.RFC3339) + } + item.Source = entry.Source + item.Type = entry.Type + item.RepoURL = entry.RepoURL + item.Version = entry.Version + item.Branch = entry.Branch } - if !info.IsDir() { - rel, _ := filepath.Rel(d.SourcePath, path) - // Normalize separators - rel = strings.ReplaceAll(rel, "\\", "/") - files = append(files, rel) + enrichSkillBranch(&item) + + // Read SKILL.md content + skillMdContent := "" + skillMdPath := filepath.Join(d.SourcePath, "SKILL.md") + if data, err := os.ReadFile(skillMdPath); err == nil { + skillMdContent = string(data) } - return nil - }) - writeJSON(w, map[string]any{ - "resource": item, - "skillMdContent": skillMdContent, - "files": files, - }) - return + // List all files in the skill directory + files := make([]string, 0) + filepath.Walk(d.SourcePath, func(path string, info os.FileInfo, err error) error { + if err != nil { + return nil + } + if info.IsDir() && utils.IsHidden(info.Name()) { + return filepath.SkipDir + } + if !info.IsDir() { + rel, _ := filepath.Rel(d.SourcePath, path) + // Normalize separators + rel = strings.ReplaceAll(rel, "\\", "/") + files = append(files, rel) + } + return nil + }) + + writeJSON(w, map[string]any{ + "resource": item, + "skillMdContent": skillMdContent, + "files": files, + }) + return + } } // Fallback: check agents source (recursive — supports --into subdirectories) - if agentsSource != "" { + if kind != "skill" && agentsSource != "" { agentDiscovered, _ := resource.AgentKind{}.Discover(agentsSource) for _, d := range agentDiscovered { - if d.FlatName != name && d.Name != name { + if !matchesAgentName(d, name) { continue } @@ -219,6 +227,7 @@ func (s *Server) handleGetSkill(w http.ResponseWriter, r *http.Request) { FlatName: d.FlatName, RelPath: d.RelPath, SourcePath: d.SourcePath, + Disabled: d.Disabled, } agentKey := strings.TrimSuffix(d.RelPath, ".md") @@ -403,6 +412,47 @@ func (s *Server) handleUninstallSkill(w http.ResponseWriter, r *http.Request) { defer s.mu.Unlock() name := r.PathValue("name") + kind := r.URL.Query().Get("kind") + if kind != "" && kind != "skill" && kind != "agent" { + writeError(w, http.StatusBadRequest, "invalid kind: "+kind) + return + } + + if kind == "agent" { + agentsSource := s.agentsSource() + if agentsSource == "" { + writeError(w, http.StatusNotFound, "agent not found: "+name) + return + } + agent, err := resolveAgentResource(agentsSource, name) + if err != nil { + writeError(w, http.StatusNotFound, err.Error()) + return + } + + displayName := agentMetaKey(agent.RelPath) + legacySidecar := filepath.Join(filepath.Dir(agent.SourcePath), filepath.Base(displayName)+".skillshare-meta.json") + if _, err := trash.MoveAgentToTrash(agent.SourcePath, legacySidecar, displayName, s.agentTrashBase()); err != nil { + writeError(w, http.StatusInternalServerError, "failed to trash agent: "+err.Error()) + return + } + + if s.agentsStore != nil { + s.agentsStore.Remove(displayName) + if err := s.agentsStore.Save(agentsSource); err != nil { + log.Printf("warning: failed to save agent metadata after uninstall: %v", err) + } + } + + s.writeOpsLog("uninstall", "ok", start, map[string]any{ + "name": displayName, + "type": "agent", + "scope": "ui", + }, "") + + writeJSON(w, map[string]any{"success": true, "name": displayName, "movedToTrash": true}) + return + } // Find skill path discovered, err := sync.DiscoverSourceSkills(s.cfg.Source) diff --git a/internal/server/handler_sync_matrix.go b/internal/server/handler_sync_matrix.go index c918f733..d1ef390d 100644 --- a/internal/server/handler_sync_matrix.go +++ b/internal/server/handler_sync_matrix.go @@ -97,12 +97,16 @@ func (s *Server) handleSyncMatrixPreview(w http.ResponseWriter, r *http.Request) // Snapshot config under RLock, then release before I/O. s.mu.RLock() source := s.cfg.Source + agentsSource := s.agentsSource() + targets := s.cloneTargets() s.mu.RUnlock() var body struct { - Target string `json:"target"` - Include []string `json:"include"` - Exclude []string `json:"exclude"` + Target string `json:"target"` + Include []string `json:"include"` + Exclude []string `json:"exclude"` + AgentInclude []string `json:"agent_include"` + AgentExclude []string `json:"agent_exclude"` } if err := json.NewDecoder(r.Body).Decode(&body); err != nil { writeError(w, http.StatusBadRequest, "invalid JSON: "+err.Error()) @@ -113,7 +117,7 @@ func (s *Server) handleSyncMatrixPreview(w http.ResponseWriter, r *http.Request) return } - // Validate patterns before discovering skills + // Validate skill patterns if _, err := ssync.FilterSkills(nil, body.Include, nil); err != nil { writeError(w, http.StatusBadRequest, err.Error()) return @@ -122,6 +126,15 @@ func (s *Server) handleSyncMatrixPreview(w http.ResponseWriter, r *http.Request) writeError(w, http.StatusBadRequest, err.Error()) return } + // Validate agent patterns + if _, err := ssync.FilterSkills(nil, body.AgentInclude, nil); err != nil { + writeError(w, http.StatusBadRequest, "invalid agent include pattern: "+err.Error()) + return + } + if _, err := ssync.FilterSkills(nil, nil, body.AgentExclude); err != nil { + writeError(w, http.StatusBadRequest, "invalid agent exclude pattern: "+err.Error()) + return + } skills, err := ssync.DiscoverSourceSkills(source) if err != nil { @@ -140,5 +153,32 @@ func (s *Server) handleSyncMatrixPreview(w http.ResponseWriter, r *http.Request) }) } + // Agents — resolve path from config or builtin defaults + target, ok := targets[body.Target] + if ok && agentsSource != "" { + ac := target.AgentsConfig() + agentPath := ac.Path + if agentPath == "" { + builtinAgents := config.DefaultAgentTargets() + if builtin, found := builtinAgents[body.Target]; found { + agentPath = builtin.Path + } + } + if agentPath != "" { + discovered, _ := resource.AgentKind{}.Discover(agentsSource) + agents := resource.ActiveAgents(discovered) + for _, agent := range agents { + status, reason := ssync.ClassifySkillForTarget(agent.FlatName, nil, body.Target, body.AgentInclude, body.AgentExclude) + entries = append(entries, syncMatrixEntry{ + Skill: agent.FlatName, + Target: body.Target, + Status: status, + Reason: reason, + Kind: "agent", + }) + } + } + } + writeJSON(w, map[string]any{"entries": entries}) } diff --git a/internal/server/handler_sync_matrix_test.go b/internal/server/handler_sync_matrix_test.go index 462dea62..951d84d7 100644 --- a/internal/server/handler_sync_matrix_test.go +++ b/internal/server/handler_sync_matrix_test.go @@ -4,6 +4,7 @@ import ( "encoding/json" "net/http" "net/http/httptest" + "os" "path/filepath" "strings" "testing" @@ -180,3 +181,114 @@ func TestHandleSyncMatrixPreview_MissingTarget(t *testing.T) { t.Errorf("expected 400, got %d", rr.Code) } } + +func TestHandleSyncMatrixPreview_IncludesAgents(t *testing.T) { + home := filepath.Join(t.TempDir(), "home") + os.MkdirAll(home, 0755) + t.Setenv("HOME", home) + + tgtPath := filepath.Join(t.TempDir(), "claude-skills") + s, sourceDir := newTestServerWithTargets(t, map[string]string{"claude": tgtPath}) + addSkill(t, sourceDir, "my-skill") + + // Add agents to the agents source directory + agentsSource := s.cfg.EffectiveAgentsSource() + addAgentFile(t, agentsSource, "code-reviewer.md") + addAgentFile(t, agentsSource, "draft-helper.md") + + body := `{"target":"claude","include":[],"exclude":[],"agent_include":[],"agent_exclude":["draft-*"]}` + req := httptest.NewRequest(http.MethodPost, "/api/sync-matrix/preview", strings.NewReader(body)) + rr := httptest.NewRecorder() + s.handler.ServeHTTP(rr, req) + if rr.Code != http.StatusOK { + t.Fatalf("expected 200, got %d: %s", rr.Code, rr.Body.String()) + } + + var resp struct { + Entries []struct { + Skill string `json:"skill"` + Status string `json:"status"` + Kind string `json:"kind"` + } `json:"entries"` + } + json.Unmarshal(rr.Body.Bytes(), &resp) + + // Should have 1 skill + 2 agents = 3 entries + if len(resp.Entries) != 3 { + t.Fatalf("expected 3 entries, got %d", len(resp.Entries)) + } + + statusMap := map[string]string{} + kindMap := map[string]string{} + for _, e := range resp.Entries { + statusMap[e.Skill] = e.Status + kindMap[e.Skill] = e.Kind + } + + // Skill should be synced + if statusMap["my-skill"] != "synced" { + t.Errorf("my-skill: expected synced, got %q", statusMap["my-skill"]) + } + if kindMap["my-skill"] != "" { + t.Errorf("my-skill kind: expected empty, got %q", kindMap["my-skill"]) + } + + // code-reviewer agent should be synced + if statusMap["code-reviewer.md"] != "synced" { + t.Errorf("code-reviewer.md: expected synced, got %q", statusMap["code-reviewer.md"]) + } + if kindMap["code-reviewer.md"] != "agent" { + t.Errorf("code-reviewer.md kind: expected agent, got %q", kindMap["code-reviewer.md"]) + } + + // draft-helper agent should be excluded + if statusMap["draft-helper.md"] != "excluded" { + t.Errorf("draft-helper.md: expected excluded, got %q", statusMap["draft-helper.md"]) + } +} + +func TestHandleSyncMatrixPreview_NoAgentsWhenNoAgentPath(t *testing.T) { + // custom-tool has no agent path in builtin targets + tgtPath := filepath.Join(t.TempDir(), "custom-skills") + s, sourceDir := newTestServerWithTargets(t, map[string]string{"custom-tool": tgtPath}) + addSkill(t, sourceDir, "my-skill") + + // Add agents to the agents source + agentsSource := s.cfg.EffectiveAgentsSource() + addAgentFile(t, agentsSource, "reviewer.md") + + body := `{"target":"custom-tool","include":[],"exclude":[]}` + req := httptest.NewRequest(http.MethodPost, "/api/sync-matrix/preview", strings.NewReader(body)) + rr := httptest.NewRecorder() + s.handler.ServeHTTP(rr, req) + if rr.Code != http.StatusOK { + t.Fatalf("expected 200, got %d: %s", rr.Code, rr.Body.String()) + } + + var resp struct { + Entries []struct { + Skill string `json:"skill"` + Kind string `json:"kind"` + } `json:"entries"` + } + json.Unmarshal(rr.Body.Bytes(), &resp) + + // Should have only 1 skill, no agents + if len(resp.Entries) != 1 { + t.Fatalf("expected 1 entry (skill only, no agents), got %d", len(resp.Entries)) + } + if resp.Entries[0].Kind == "agent" { + t.Error("expected no agent entries for target without agent path") + } +} + +func TestHandleSyncMatrixPreview_InvalidAgentPattern(t *testing.T) { + s, _ := newTestServer(t) + body := `{"target":"claude","include":[],"exclude":[],"agent_include":["[unclosed"]}` + req := httptest.NewRequest(http.MethodPost, "/api/sync-matrix/preview", strings.NewReader(body)) + rr := httptest.NewRecorder() + s.handler.ServeHTTP(rr, req) + if rr.Code != http.StatusBadRequest { + t.Errorf("expected 400 for invalid agent pattern, got %d", rr.Code) + } +} diff --git a/internal/server/handler_targets.go b/internal/server/handler_targets.go index 0c48d4fd..43469ff9 100644 --- a/internal/server/handler_targets.go +++ b/internal/server/handler_targets.go @@ -319,6 +319,8 @@ func (s *Server) handleUpdateTarget(w http.ResponseWriter, r *http.Request) { Mode *string `json:"mode"` TargetNaming *string `json:"target_naming"` AgentMode *string `json:"agent_mode"` + AgentInclude *[]string `json:"agent_include"` + AgentExclude *[]string `json:"agent_exclude"` } if err := json.NewDecoder(r.Body).Decode(&body); err != nil { writeError(w, http.StatusBadRequest, "invalid JSON body") @@ -371,6 +373,21 @@ func (s *Server) handleUpdateTarget(w http.ResponseWriter, r *http.Request) { } } + if body.AgentInclude != nil { + if _, err := ssync.FilterSkills(nil, *body.AgentInclude, nil); err != nil { + writeError(w, http.StatusBadRequest, "invalid agent include pattern: "+err.Error()) + return + } + target.EnsureAgents().Include = *body.AgentInclude + } + if body.AgentExclude != nil { + if _, err := ssync.FilterSkills(nil, nil, *body.AgentExclude); err != nil { + writeError(w, http.StatusBadRequest, "invalid agent exclude pattern: "+err.Error()) + return + } + target.EnsureAgents().Exclude = *body.AgentExclude + } + s.cfg.Targets[name] = target // In project mode, also update the project config @@ -393,6 +410,12 @@ func (s *Server) handleUpdateTarget(w http.ResponseWriter, r *http.Request) { if body.AgentMode != nil { s.projectCfg.Targets[i].EnsureAgents().Mode = *body.AgentMode } + if body.AgentInclude != nil { + s.projectCfg.Targets[i].EnsureAgents().Include = *body.AgentInclude + } + if body.AgentExclude != nil { + s.projectCfg.Targets[i].EnsureAgents().Exclude = *body.AgentExclude + } break } } @@ -403,7 +426,7 @@ func (s *Server) handleUpdateTarget(w http.ResponseWriter, r *http.Request) { return } - hasFilter := body.Include != nil || body.Exclude != nil + hasFilter := body.Include != nil || body.Exclude != nil || body.AgentInclude != nil || body.AgentExclude != nil hasSetting := body.Mode != nil || body.TargetNaming != nil || body.AgentMode != nil action := "filter" if hasSetting && hasFilter { diff --git a/internal/server/handler_targets_agents_test.go b/internal/server/handler_targets_agents_test.go new file mode 100644 index 00000000..d6202c2a --- /dev/null +++ b/internal/server/handler_targets_agents_test.go @@ -0,0 +1,213 @@ +package server + +import ( + "encoding/json" + "net/http" + "net/http/httptest" + "os" + "path/filepath" + "testing" + + "skillshare/internal/config" +) + +type targetAgentResponse struct { + Name string `json:"name"` + AgentPath string `json:"agentPath"` + AgentMode string `json:"agentMode"` + AgentInclude []string `json:"agentInclude"` + AgentExclude []string `json:"agentExclude"` + AgentLinkedCount *int `json:"agentLinkedCount"` + AgentExpectedCount *int `json:"agentExpectedCount"` +} + +func TestHandleListTargets_IncludesGlobalBuiltinAgents(t *testing.T) { + home := filepath.Join(t.TempDir(), "home") + if err := os.MkdirAll(home, 0755); err != nil { + t.Fatalf("mkdir home: %v", err) + } + t.Setenv("HOME", home) + + tgtPath := filepath.Join(t.TempDir(), "claude-skills") + s, _ := newTestServerWithTargets(t, map[string]string{"claude": tgtPath}) + + agentSource := s.cfg.EffectiveAgentsSource() + agentFile := addAgentFile(t, agentSource, "reviewer.md") + agentTarget := filepath.Join(home, ".claude", "agents") + addAgentLink(t, agentTarget, "reviewer.md", agentFile) + + target := fetchTargetByName(t, s, "claude") + if target.AgentPath != agentTarget { + t.Fatalf("agent path = %q, want %q", target.AgentPath, agentTarget) + } + if target.AgentMode != "merge" { + t.Fatalf("agent mode = %q, want merge", target.AgentMode) + } + if target.AgentLinkedCount == nil || *target.AgentLinkedCount != 1 { + t.Fatalf("agent linked = %v, want 1", target.AgentLinkedCount) + } + if target.AgentExpectedCount == nil || *target.AgentExpectedCount != 1 { + t.Fatalf("agent expected = %v, want 1", target.AgentExpectedCount) + } +} + +func TestHandleListTargets_IncludesProjectBuiltinAgents(t *testing.T) { + s, projectRoot := newProjectTargetServer(t, []config.ProjectTargetEntry{{Name: "claude"}}) + + agentFile := addAgentFile(t, filepath.Join(projectRoot, ".skillshare", "agents"), "reviewer.md") + agentTarget := filepath.Join(projectRoot, ".claude", "agents") + addAgentLink(t, agentTarget, "reviewer.md", agentFile) + + target := fetchTargetByName(t, s, "claude") + if target.AgentPath != agentTarget { + t.Fatalf("agent path = %q, want %q", target.AgentPath, agentTarget) + } + if target.AgentMode != "merge" { + t.Fatalf("agent mode = %q, want merge", target.AgentMode) + } + if target.AgentLinkedCount == nil || *target.AgentLinkedCount != 1 { + t.Fatalf("agent linked = %v, want 1", target.AgentLinkedCount) + } + if target.AgentExpectedCount == nil || *target.AgentExpectedCount != 1 { + t.Fatalf("agent expected = %v, want 1", target.AgentExpectedCount) + } +} + +func TestHandleListTargets_CustomAgentPathOverridesBuiltin(t *testing.T) { + tgtPath := filepath.Join(t.TempDir(), "claude-skills") + s, _ := newTestServerWithTargets(t, map[string]string{"claude": tgtPath}) + + customAgentPath := filepath.Join(t.TempDir(), "custom-agents-target") + if err := os.MkdirAll(customAgentPath, 0755); err != nil { + t.Fatalf("mkdir custom target: %v", err) + } + + cfg, err := config.Load() + if err != nil { + t.Fatalf("load config: %v", err) + } + cfgTarget := cfg.Targets["claude"] + cfgTarget.Agents = &config.ResourceTargetConfig{ + Path: customAgentPath, + Mode: "copy", + Include: []string{"review-*"}, + Exclude: []string{"draft-*"}, + } + cfg.Targets["claude"] = cfgTarget + if err := cfg.Save(); err != nil { + t.Fatalf("save config: %v", err) + } + + addAgentFile(t, cfg.EffectiveAgentsSource(), "review-alpha.md") + targetResp := fetchTargetByName(t, s, "claude") + if targetResp.AgentPath != customAgentPath { + t.Fatalf("agent path = %q, want %q", targetResp.AgentPath, customAgentPath) + } + if targetResp.AgentMode != "copy" { + t.Fatalf("agent mode = %q, want copy", targetResp.AgentMode) + } + if got := targetResp.AgentInclude; len(got) != 1 || got[0] != "review-*" { + t.Fatalf("agent include = %v, want [review-*]", got) + } + if got := targetResp.AgentExclude; len(got) != 1 || got[0] != "draft-*" { + t.Fatalf("agent exclude = %v, want [draft-*]", got) + } +} + +func TestHandleListTargets_OmitsAgentsForUnsupportedTarget(t *testing.T) { + tgtPath := filepath.Join(t.TempDir(), "custom-skills") + s, _ := newTestServerWithTargets(t, map[string]string{"custom-tool": tgtPath}) + + target := fetchTargetByName(t, s, "custom-tool") + if target.AgentPath != "" { + t.Fatalf("expected empty agent path, got %q", target.AgentPath) + } + if target.AgentLinkedCount != nil { + t.Fatalf("expected nil agent linked count, got %v", *target.AgentLinkedCount) + } + if target.AgentExpectedCount != nil { + t.Fatalf("expected nil agent expected count, got %v", *target.AgentExpectedCount) + } +} + +func fetchTargetByName(t *testing.T, s *Server, name string) targetAgentResponse { + t.Helper() + + req := httptest.NewRequest(http.MethodGet, "/api/targets", nil) + rr := httptest.NewRecorder() + s.handler.ServeHTTP(rr, req) + if rr.Code != http.StatusOK { + t.Fatalf("expected 200, got %d: %s", rr.Code, rr.Body.String()) + } + + var resp struct { + Targets []targetAgentResponse `json:"targets"` + } + if err := json.Unmarshal(rr.Body.Bytes(), &resp); err != nil { + t.Fatalf("decode response: %v", err) + } + + for _, target := range resp.Targets { + if target.Name == name { + return target + } + } + t.Fatalf("target %q not found in response", name) + return targetAgentResponse{} +} + +func addAgentFile(t *testing.T, dir, name string) string { + t.Helper() + + if err := os.MkdirAll(dir, 0755); err != nil { + t.Fatalf("mkdir agent source: %v", err) + } + path := filepath.Join(dir, name) + if err := os.WriteFile(path, []byte("# "+name), 0644); err != nil { + t.Fatalf("write agent file: %v", err) + } + return path +} + +func addAgentLink(t *testing.T, dir, name, source string) string { + t.Helper() + + if err := os.MkdirAll(dir, 0755); err != nil { + t.Fatalf("mkdir agent target: %v", err) + } + linkPath := filepath.Join(dir, name) + if err := os.Symlink(source, linkPath); err != nil { + t.Fatalf("symlink agent: %v", err) + } + return linkPath +} + +func newProjectTargetServer(t *testing.T, targets []config.ProjectTargetEntry) (*Server, string) { + t.Helper() + + projectRoot := t.TempDir() + if err := os.MkdirAll(filepath.Join(projectRoot, ".skillshare", "skills"), 0755); err != nil { + t.Fatalf("mkdir project skills: %v", err) + } + if err := os.MkdirAll(filepath.Join(projectRoot, ".skillshare", "agents"), 0755); err != nil { + t.Fatalf("mkdir project agents: %v", err) + } + + projectCfg := &config.ProjectConfig{Targets: targets} + if err := projectCfg.Save(projectRoot); err != nil { + t.Fatalf("save project config: %v", err) + } + + resolvedTargets, err := config.ResolveProjectTargets(projectRoot, projectCfg) + if err != nil { + t.Fatalf("resolve project targets: %v", err) + } + + cfg := &config.Config{ + Source: filepath.Join(projectRoot, ".skillshare", "skills"), + Mode: "merge", + Targets: resolvedTargets, + } + + return NewProject(cfg, projectCfg, projectRoot, "127.0.0.1:0", "", ""), projectRoot +} diff --git a/internal/server/handler_targets_test.go b/internal/server/handler_targets_test.go index f6356b1f..5958b20c 100644 --- a/internal/server/handler_targets_test.go +++ b/internal/server/handler_targets_test.go @@ -272,3 +272,70 @@ func TestHandleUpdateTarget_ClearFilters(t *testing.T) { t.Errorf("GET exclude should be empty after clear, got %v", resp.Targets[0].Exclude) } } + +func TestHandleUpdateTarget_AgentIncludeExclude_Persisted(t *testing.T) { + tgtPath := filepath.Join(t.TempDir(), "claude-skills") + s, _ := newTestServerWithTargets(t, map[string]string{"claude": tgtPath}) + + // PATCH agent include/exclude + body := `{"agent_include":["review-*"],"agent_exclude":["draft-*"]}` + req := httptest.NewRequest(http.MethodPatch, "/api/targets/claude", strings.NewReader(body)) + rr := httptest.NewRecorder() + s.handler.ServeHTTP(rr, req) + if rr.Code != http.StatusOK { + t.Fatalf("PATCH expected 200, got %d: %s", rr.Code, rr.Body.String()) + } + + // Verify disk persistence + diskCfg, err := config.Load() + if err != nil { + t.Fatalf("failed to load config from disk: %v", err) + } + tgt, ok := diskCfg.Targets["claude"] + if !ok { + t.Fatal("target 'claude' not found in disk config") + } + ac := tgt.AgentsConfig() + if len(ac.Include) != 1 || ac.Include[0] != "review-*" { + t.Errorf("disk agent include mismatch: got %v", ac.Include) + } + if len(ac.Exclude) != 1 || ac.Exclude[0] != "draft-*" { + t.Errorf("disk agent exclude mismatch: got %v", ac.Exclude) + } + + // Verify in-memory state + memTgt := s.cfg.Targets["claude"] + memAc := memTgt.AgentsConfig() + if len(memAc.Include) != 1 || memAc.Include[0] != "review-*" { + t.Errorf("in-memory agent include mismatch: got %v", memAc.Include) + } + if len(memAc.Exclude) != 1 || memAc.Exclude[0] != "draft-*" { + t.Errorf("in-memory agent exclude mismatch: got %v", memAc.Exclude) + } +} + +func TestHandleUpdateTarget_AgentInclude_InvalidPattern(t *testing.T) { + tgtPath := filepath.Join(t.TempDir(), "claude-skills") + s, _ := newTestServerWithTargets(t, map[string]string{"claude": tgtPath}) + + body := `{"agent_include":["[unclosed"]}` + req := httptest.NewRequest(http.MethodPatch, "/api/targets/claude", strings.NewReader(body)) + rr := httptest.NewRecorder() + s.handler.ServeHTTP(rr, req) + if rr.Code != http.StatusBadRequest { + t.Errorf("expected 400 for invalid agent include pattern, got %d", rr.Code) + } +} + +func TestHandleUpdateTarget_AgentExclude_InvalidPattern(t *testing.T) { + tgtPath := filepath.Join(t.TempDir(), "claude-skills") + s, _ := newTestServerWithTargets(t, map[string]string{"claude": tgtPath}) + + body := `{"agent_exclude":["[bad"]}` + req := httptest.NewRequest(http.MethodPatch, "/api/targets/claude", strings.NewReader(body)) + rr := httptest.NewRecorder() + s.handler.ServeHTTP(rr, req) + if rr.Code != http.StatusBadRequest { + t.Errorf("expected 400 for invalid agent exclude pattern, got %d", rr.Code) + } +} diff --git a/internal/server/handler_toggle.go b/internal/server/handler_toggle.go index 913dcc9e..6792ed5f 100644 --- a/internal/server/handler_toggle.go +++ b/internal/server/handler_toggle.go @@ -22,20 +22,42 @@ func (s *Server) handleToggleSkill(w http.ResponseWriter, r *http.Request, enabl start := time.Now() name := r.PathValue("name") + kind := r.URL.Query().Get("kind") + if kind != "" && kind != "agent" && kind != "skill" { + writeError(w, http.StatusBadRequest, "invalid kind: "+kind) + return + } // Resolve under RLock — discovery is I/O-heavy, don't hold write lock s.mu.RLock() source := s.cfg.Source + agentsSource := s.agentsSource() s.mu.RUnlock() - relPath, isDisabled, err := s.resolveSkillRelPathWithStatus(source, name) - if err != nil { - writeError(w, http.StatusNotFound, err.Error()) - return + relPath := "" + isDisabled := false + ignorePath := "" + if kind == "agent" { + if agentsSource == "" { + writeError(w, http.StatusNotFound, "agent not found: "+name) + return + } + var err error + relPath, isDisabled, err = s.resolveAgentRelPathWithStatus(agentsSource, name) + if err != nil { + writeError(w, http.StatusNotFound, err.Error()) + return + } + ignorePath = filepath.Join(agentsSource, ".agentignore") + } else { + var err error + relPath, isDisabled, err = s.resolveSkillRelPathWithStatus(source, name) + if err != nil { + writeError(w, http.StatusNotFound, err.Error()) + return + } + ignorePath = filepath.Join(source, ".skillignore") } - - ignorePath := filepath.Join(source, ".skillignore") - // Write lock only for the file mutation s.mu.Lock() defer s.mu.Unlock() @@ -73,6 +95,7 @@ func (s *Server) handleToggleSkill(w http.ResponseWriter, r *http.Request, enabl s.writeOpsLog(action, "ok", start, map[string]any{ "name": name, + "kind": kind, "scope": "ui", }, "") diff --git a/internal/server/handler_update.go b/internal/server/handler_update.go index 68ae4462..d12e8cea 100644 --- a/internal/server/handler_update.go +++ b/internal/server/handler_update.go @@ -6,6 +6,7 @@ import ( "net/http" "os" "path/filepath" + "strings" "time" "skillshare/internal/audit" @@ -16,6 +17,7 @@ import ( type updateRequest struct { Name string `json:"name"` + Kind string `json:"kind,omitempty"` Force bool `json:"force"` All bool `json:"all"` SkipAudit bool `json:"skipAudit"` @@ -100,8 +102,12 @@ func (s *Server) handleUpdate(w http.ResponseWriter, r *http.Request) { writeError(w, http.StatusBadRequest, "name is required (or use all: true)") return } + if body.Kind != "" && body.Kind != "skill" && body.Kind != "agent" { + writeError(w, http.StatusBadRequest, "invalid kind: "+body.Kind) + return + } - result := s.updateSingle(body.Name, body.Force, body.SkipAudit) + result := s.updateSingleByKind(body.Name, body.Kind, body.Force, body.SkipAudit) status := "ok" msg := "" if result.Action == "error" { @@ -124,6 +130,13 @@ func (s *Server) handleUpdate(w http.ResponseWriter, r *http.Request) { } func (s *Server) updateSingle(name string, force, skipAudit bool) updateResultItem { + return s.updateSingleByKind(name, "", force, skipAudit) +} + +func (s *Server) updateSingleByKind(name, kind string, force, skipAudit bool) updateResultItem { + if kind == "agent" { + return s.updateAgent(name, force, skipAudit) + } // Try exact skill path first (prevents basename collision with nested repos) skillPath := filepath.Join(s.cfg.Source, name) if entry := s.skillsStore.GetByPath(name); entry != nil && entry.Source != "" { @@ -146,6 +159,97 @@ func (s *Server) updateSingle(name string, force, skipAudit bool) updateResultIt } } +func (s *Server) updateAgent(name string, _ bool, _ bool) updateResultItem { + agentsSource := s.agentsSource() + if agentsSource == "" { + return updateResultItem{Name: name, Kind: "agent", Action: "error", Message: "agents source is not configured"} + } + + localAgent, err := resolveAgentResource(agentsSource, name) + if err != nil { + return updateResultItem{Name: name, Kind: "agent", Action: "error", Message: err.Error()} + } + + metaKey := agentMetaKey(localAgent.RelPath) + entry := s.agentsStore.GetByPath(metaKey) + if entry == nil || entry.Source == "" { + return updateResultItem{ + Name: metaKey, + Kind: "agent", + Action: "skipped", + Message: "agent is local and has no update source", + } + } + + source, err := install.ParseSource(entry.Source) + if err != nil { + return updateResultItem{Name: metaKey, Kind: "agent", Action: "error", Message: "invalid source: " + err.Error()} + } + + repoSubdir := strings.TrimSuffix(source.Subdir, entry.Subdir) + repoSubdir = strings.TrimRight(repoSubdir, "/") + source.Subdir = repoSubdir + + var discovery *install.DiscoveryResult + if source.HasSubdir() { + discovery, err = install.DiscoverFromGitSubdir(source) + } else { + discovery, err = install.DiscoverFromGit(source) + } + if err != nil { + return updateResultItem{Name: metaKey, Kind: "agent", Action: "error", Message: err.Error()} + } + defer install.CleanupDiscovery(discovery) + + if discovery.CommitHash != "" && discovery.CommitHash == entry.Version { + return updateResultItem{Name: metaKey, Kind: "agent", Action: "up-to-date"} + } + + var target *install.AgentInfo + for i := range discovery.Agents { + candidate := discovery.Agents[i] + if candidate.Path == entry.Subdir || + candidate.FileName == filepath.Base(localAgent.RelPath) || + candidate.Name == filepath.Base(metaKey) { + target = &discovery.Agents[i] + break + } + } + if target == nil { + return updateResultItem{ + Name: metaKey, + Kind: "agent", + Action: "error", + Message: fmt.Sprintf("agent path %q not found in repository", entry.Subdir), + } + } + + destDir := filepath.Dir(localAgent.SourcePath) + res, err := install.InstallAgentFromDiscovery(discovery, *target, destDir, install.InstallOptions{ + Kind: "agent", + Force: true, + SourceDir: agentsSource, + }) + if err != nil { + return updateResultItem{Name: metaKey, Kind: "agent", Action: "error", Message: err.Error()} + } + + if st, loadErr := install.LoadMetadataWithMigration(agentsSource, install.MetadataKindAgent); loadErr == nil && st != nil { + s.agentsStore = st + } + + message := res.Action + if message == "" { + message = "updated" + } + return updateResultItem{ + Name: metaKey, + Kind: "agent", + Action: "updated", + Message: message, + } +} + func (s *Server) updateTrackedRepo(name, repoPath string, force, skipAudit bool) updateResultItem { // Check for uncommitted changes if isDirty, _ := git.IsDirty(repoPath); isDirty { diff --git a/internal/server/resource_agents.go b/internal/server/resource_agents.go new file mode 100644 index 00000000..8394cbdf --- /dev/null +++ b/internal/server/resource_agents.go @@ -0,0 +1,50 @@ +package server + +import ( + "fmt" + "path/filepath" + "strings" + + "skillshare/internal/resource" +) + +func agentDisplayName(relPath string) string { + return strings.TrimSuffix(relPath, ".md") +} + +func matchesAgentName(d resource.DiscoveredResource, name string) bool { + return d.FlatName == name || + d.Name == name || + d.RelPath == name || + agentDisplayName(d.RelPath) == name +} + +func resolveAgentResource(agentsSource, name string) (resource.DiscoveredResource, error) { + discovered, err := resource.AgentKind{}.Discover(agentsSource) + if err != nil { + return resource.DiscoveredResource{}, fmt.Errorf("failed to discover agents: %w", err) + } + for _, d := range discovered { + if matchesAgentName(d, name) { + return d, nil + } + } + return resource.DiscoveredResource{}, fmt.Errorf("agent not found: %s", name) +} + +func (s *Server) resolveAgentRelPathWithStatus(agentsSource, name string) (string, bool, error) { + discovered, err := resource.AgentKind{}.Discover(agentsSource) + if err != nil { + return "", false, fmt.Errorf("failed to discover agents: %w", err) + } + for _, d := range discovered { + if matchesAgentName(d, name) { + return d.RelPath, d.Disabled, nil + } + } + return "", false, fmt.Errorf("agent not found: %s", name) +} + +func agentMetaKey(relPath string) string { + return strings.TrimSuffix(filepath.ToSlash(relPath), ".md") +} diff --git a/internal/targetsummary/agents.go b/internal/targetsummary/agents.go new file mode 100644 index 00000000..a0fbe92a --- /dev/null +++ b/internal/targetsummary/agents.go @@ -0,0 +1,185 @@ +package targetsummary + +import ( + "os" + "path/filepath" + "strings" + + "skillshare/internal/config" + "skillshare/internal/resource" + ssync "skillshare/internal/sync" +) + +const defaultAgentMode = "merge" + +// AgentSummary describes the effective agent configuration and sync counts for +// a single target. ManagedCount maps to linked agents in merge/symlink mode and +// managed copied agents in copy mode. +type AgentSummary struct { + DisplayPath string + Path string + Mode string + Include []string + Exclude []string + ManagedCount int + ExpectedCount int +} + +// Builder caches the discovered agent source so multiple target summaries can +// share the same resolution and counting logic. +type Builder struct { + sourcePath string + projectRoot string + builtinAgents map[string]config.TargetConfig + activeAgents []resource.DiscoveredResource + sourceExists bool +} + +// NewGlobalBuilder returns a summary builder for global-mode targets. +func NewGlobalBuilder(cfg *config.Config) (*Builder, error) { + return newBuilder(cfg.EffectiveAgentsSource(), "", config.DefaultAgentTargets()) +} + +// NewProjectBuilder returns a summary builder for project-mode targets. +func NewProjectBuilder(projectRoot string) (*Builder, error) { + return newBuilder(filepath.Join(projectRoot, ".skillshare", "agents"), projectRoot, config.ProjectAgentTargets()) +} + +func newBuilder(sourcePath, projectRoot string, builtinAgents map[string]config.TargetConfig) (*Builder, error) { + builder := &Builder{ + sourcePath: sourcePath, + projectRoot: projectRoot, + builtinAgents: builtinAgents, + } + + if !dirExists(sourcePath) { + return builder, nil + } + + discovered, err := resource.AgentKind{}.Discover(sourcePath) + if err != nil { + return nil, err + } + builder.activeAgents = resource.ActiveAgents(discovered) + builder.sourceExists = true + return builder, nil +} + +// GlobalTarget returns the effective agents summary for a global target. +func (b *Builder) GlobalTarget(name string, tc config.TargetConfig) (*AgentSummary, error) { + ac := tc.AgentsConfig() + displayPath := ac.Path + if displayPath == "" { + if builtin, ok := b.builtinAgents[name]; ok { + displayPath = config.ExpandPath(builtin.Path) + } + } + if displayPath == "" { + return nil, nil + } + + return b.buildSummary(config.ExpandPath(displayPath), displayPath, ac.Mode, ac.Include, ac.Exclude) +} + +// ProjectTarget returns the effective agents summary for a project target. +func (b *Builder) ProjectTarget(entry config.ProjectTargetEntry) (*AgentSummary, error) { + ac := entry.AgentsConfig() + displayPath := ac.Path + if displayPath == "" { + if builtin, ok := b.builtinAgents[entry.Name]; ok { + displayPath = builtin.Path + } + } + if displayPath == "" { + return nil, nil + } + + return b.buildSummary(resolveProjectPath(b.projectRoot, displayPath), displayPath, ac.Mode, ac.Include, ac.Exclude) +} + +func (b *Builder) buildSummary(path, displayPath, mode string, include, exclude []string) (*AgentSummary, error) { + if mode == "" { + mode = defaultAgentMode + } + + summary := &AgentSummary{ + Path: path, + DisplayPath: displayPath, + Mode: mode, + Include: append([]string(nil), include...), + Exclude: append([]string(nil), exclude...), + } + + expectedAgents := b.activeAgents + if b.sourceExists && mode != "symlink" { + filtered, err := ssync.FilterAgents(expectedAgents, include, exclude) + if err != nil { + return nil, err + } + expectedAgents = filtered + } + + if b.sourceExists { + summary.ExpectedCount = len(expectedAgents) + } + summary.ManagedCount = countManagedAgents(path, mode, b.sourcePath, summary.ExpectedCount) + + return summary, nil +} + +func countManagedAgents(targetPath, mode, sourcePath string, expectedCount int) int { + switch mode { + case "copy": + _, managed, _ := ssync.CheckStatusCopy(targetPath) + return managed + case "symlink": + if ssync.CheckStatus(targetPath, sourcePath) == ssync.StatusLinked { + return expectedCount + } + return 0 + default: + return countHealthyAgentLinks(targetPath) + } +} + +func countHealthyAgentLinks(dir string) int { + entries, err := os.ReadDir(dir) + if err != nil { + return 0 + } + + linked := 0 + for _, entry := range entries { + if entry.IsDir() { + continue + } + if !strings.HasSuffix(strings.ToLower(entry.Name()), ".md") { + continue + } + if entry.Type()&os.ModeSymlink == 0 { + continue + } + if _, err := os.Stat(filepath.Join(dir, entry.Name())); err == nil { + linked++ + } + } + + return linked +} + +func dirExists(path string) bool { + info, err := os.Stat(path) + return err == nil && info.IsDir() +} + +func resolveProjectPath(projectRoot, path string) string { + if path == "" { + return "" + } + + resolved := config.ExpandPath(path) + if !filepath.IsAbs(resolved) { + return filepath.Join(projectRoot, filepath.FromSlash(resolved)) + } + return resolved +} diff --git a/tests/integration/target_filter_test.go b/tests/integration/target_filter_test.go index 8f1fb0db..83e62473 100644 --- a/tests/integration/target_filter_test.go +++ b/tests/integration/target_filter_test.go @@ -202,10 +202,15 @@ func TestTargetFilter_HelpShowsFilterFlags(t *testing.T) { result := sb.RunCLI("target", "help") result.AssertSuccess(t) + result.AssertOutputContains(t, "--agent-mode") result.AssertOutputContains(t, "--add-include") result.AssertOutputContains(t, "--add-exclude") result.AssertOutputContains(t, "--remove-include") result.AssertOutputContains(t, "--remove-exclude") + result.AssertOutputContains(t, "--add-agent-include") + result.AssertOutputContains(t, "--add-agent-exclude") + result.AssertOutputContains(t, "--remove-agent-include") + result.AssertOutputContains(t, "--remove-agent-exclude") result.AssertOutputContains(t, "Project mode") } @@ -223,3 +228,100 @@ func TestTargetFilter_Project_AddAndShow(t *testing.T) { info.AssertSuccess(t) info.AssertOutputContains(t, "Include: team-*") } + +func TestTargetFilter_AgentAddInclude(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + targetPath := sb.CreateTarget("claude") + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + path: ` + targetPath + ` +`) + + result := sb.RunCLI("target", "claude", "--add-agent-include", "team-*") + result.AssertSuccess(t) + result.AssertOutputContains(t, "added agent include: team-*") + + configContent := sb.ReadFile(sb.ConfigPath) + if !strings.Contains(configContent, "agents:") { + t.Fatal("agents block should be written to config") + } + if !strings.Contains(configContent, "team-*") { + t.Fatal("agent include pattern should be in config") + } + + info := sb.RunCLI("target", "claude") + info.AssertSuccess(t) + info.AssertOutputContains(t, "Agents:") + info.AssertOutputContains(t, "Include: team-*") +} + +func TestTargetFilter_AgentModeAndSymlinkGuard(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + targetPath := sb.CreateTarget("claude") + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + path: ` + targetPath + ` +`) + + mode := sb.RunCLI("target", "claude", "--agent-mode", "copy") + mode.AssertSuccess(t) + mode.AssertOutputContains(t, "Changed claude agent mode: merge -> copy") + + info := sb.RunCLI("target", "claude") + info.AssertSuccess(t) + info.AssertOutputContains(t, "Agents:") + info.AssertOutputContains(t, "Mode: copy") + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + path: ` + targetPath + ` + agents: + mode: symlink +`) + + symlinkInfo := sb.RunCLI("target", "claude") + symlinkInfo.AssertSuccess(t) + symlinkInfo.AssertOutputContains(t, "Filters: ignored in symlink mode") + + rejected := sb.RunCLI("target", "claude", "--add-agent-include", "team-*") + rejected.AssertFailure(t) + rejected.AssertAnyOutputContains(t, "ignored in symlink mode") +} + +func TestTargetFilter_AgentUnsupportedTarget(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + targetPath := filepath.Join(sb.Root, "custom-skills") + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + custom-tool: + path: ` + targetPath + ` +`) + + result := sb.RunCLI("target", "custom-tool", "--add-agent-include", "team-*") + result.AssertFailure(t) + result.AssertAnyOutputContains(t, "does not have an agents path") +} + +func TestTargetFilter_Project_AgentAddAndShow(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + projectRoot := sb.SetupProjectDir("claude") + + result := sb.RunCLIInDir(projectRoot, "target", "claude", "--add-agent-include", "team-*", "-p") + result.AssertSuccess(t) + result.AssertOutputContains(t, "added agent include: team-*") + + info := sb.RunCLIInDir(projectRoot, "target", "claude", "-p") + info.AssertSuccess(t) + info.AssertOutputContains(t, "Agents:") + info.AssertOutputContains(t, "Include: team-*") +} diff --git a/ui/src/api/client.ts b/ui/src/api/client.ts index bf608606..86543088 100644 --- a/ui/src/api/client.ts +++ b/ui/src/api/client.ts @@ -128,20 +128,33 @@ export const api = { // Resources (skills + agents) listSkills: (kind?: 'skill' | 'agent') => apiFetch<{ resources: Skill[] }>(kind ? `/resources?kind=${kind}` : '/resources'), - getSkill: (name: string) => - apiFetch<{ resource: Skill; skillMdContent: string; files: string[] }>(`/resources/${encodeURIComponent(name)}`), - deleteSkill: (name: string) => - apiFetch<{ success: boolean }>(`/resources/${encodeURIComponent(name)}`, { method: 'DELETE' }), - disableSkill: (name: string) => + getResource: (name: string, kind?: 'skill' | 'agent') => + apiFetch<{ resource: Skill; skillMdContent: string; files: string[] }>( + `/resources/${encodeURIComponent(name)}${kind ? `?kind=${kind}` : ''}` + ), + getSkill: (name: string, kind?: 'skill' | 'agent') => + api.getResource(name, kind), + deleteResource: (name: string, kind?: 'skill' | 'agent') => + apiFetch<{ success: boolean }>( + `/resources/${encodeURIComponent(name)}${kind ? `?kind=${kind}` : ''}`, + { method: 'DELETE' } + ), + deleteSkill: (name: string, kind?: 'skill' | 'agent') => + api.deleteResource(name, kind), + disableResource: (name: string, kind?: 'skill' | 'agent') => apiFetch<{ success: boolean; name: string; disabled: boolean }>( - `/resources/${encodeURIComponent(name)}/disable`, + `/resources/${encodeURIComponent(name)}/disable${kind ? `?kind=${kind}` : ''}`, { method: 'POST' } ), - enableSkill: (name: string) => + disableSkill: (name: string, kind?: 'skill' | 'agent') => + api.disableResource(name, kind), + enableResource: (name: string, kind?: 'skill' | 'agent') => apiFetch<{ success: boolean; name: string; disabled: boolean }>( - `/resources/${encodeURIComponent(name)}/enable`, + `/resources/${encodeURIComponent(name)}/enable${kind ? `?kind=${kind}` : ''}`, { method: 'POST' } ), + enableSkill: (name: string, kind?: 'skill' | 'agent') => + api.enableResource(name, kind), batchUninstall: (opts: BatchUninstallRequest) => apiFetch('/uninstall/batch', { method: 'POST', @@ -180,7 +193,7 @@ export const api = { }), removeTarget: (name: string) => apiFetch<{ success: boolean }>(`/targets/${encodeURIComponent(name)}`, { method: 'DELETE' }), - updateTarget: (name: string, opts: { include?: string[]; exclude?: string[]; mode?: string; target_naming?: string; agent_mode?: string }) => + updateTarget: (name: string, opts: { include?: string[]; exclude?: string[]; mode?: string; target_naming?: string; agent_mode?: string; agent_include?: string[]; agent_exclude?: string[] }) => apiFetch<{ success: boolean }>(`/targets/${encodeURIComponent(name)}`, { method: 'PATCH', body: JSON.stringify(opts), @@ -191,10 +204,16 @@ export const api = { apiFetch<{ entries: SyncMatrixEntry[] }>( `/sync-matrix${target ? '?target=' + encodeURIComponent(target) : ''}` ), - previewSyncMatrix: (target: string, include: string[], exclude: string[]) => + previewSyncMatrix: (target: string, include: string[], exclude: string[], agentInclude?: string[], agentExclude?: string[]) => apiFetch<{ entries: SyncMatrixEntry[] }>('/sync-matrix/preview', { method: 'POST', - body: JSON.stringify({ target, include, exclude }), + body: JSON.stringify({ + target, + include, + exclude, + ...(agentInclude && { agent_include: agentInclude }), + ...(agentExclude && { agent_exclude: agentExclude }), + }), }), // Sync @@ -273,7 +292,7 @@ export const api = { }), // Update - update: (opts: { name?: string; force?: boolean; all?: boolean; skipAudit?: boolean }) => + update: (opts: { name?: string; kind?: 'skill' | 'agent'; force?: boolean; all?: boolean; skipAudit?: boolean }) => apiFetch<{ results: UpdateResultItem[] }>('/update', { method: 'POST', body: JSON.stringify(opts), @@ -492,6 +511,8 @@ export interface TrackedRepo { export interface Overview { source: string; + agentsSource?: string; + extrasSource?: string; skillCount: number; agentCount: number; topLevelCount: number; diff --git a/ui/src/components/KindBadge.tsx b/ui/src/components/KindBadge.tsx index 2dc9ca1c..b5488979 100644 --- a/ui/src/components/KindBadge.tsx +++ b/ui/src/components/KindBadge.tsx @@ -1,12 +1,18 @@ -import Badge from './Badge'; - interface KindBadgeProps { kind: 'skill' | 'agent'; } +const styles = { + agent: 'text-blue bg-info-light', + skill: 'text-pencil-light bg-muted', +}; + export default function KindBadge({ kind }: KindBadgeProps) { - if (kind === 'agent') { - return Agent; - } - return Skill; + return ( + + {kind} + + ); } diff --git a/ui/src/components/TargetMenu.tsx b/ui/src/components/TargetMenu.tsx index 8a0b34b1..42446df6 100644 --- a/ui/src/components/TargetMenu.tsx +++ b/ui/src/components/TargetMenu.tsx @@ -276,6 +276,7 @@ function SubmenuTrigger({ interface TargetMenuProps { currentTargets: string[] | null; // null = All isUniform?: boolean; // for folders + showTargets?: boolean; /** Menu item label. Defaults to "Available in...". */ label?: string; /** Additional flat action items appended after the target submenu (e.g. Uninstall). */ @@ -289,6 +290,7 @@ interface TargetMenuProps { export default function TargetMenu({ currentTargets, isUniform = true, + showTargets = true, label = 'Available in...', extraItems, onSelect, @@ -308,29 +310,35 @@ export default function TargetMenu({ const targets = (availableData?.targets ?? []).filter((t) => t.installed); const isAllSelected = isUniform && (!currentTargets || currentTargets.length === 0); - const setTargetItem: ContextMenuItem = { - key: 'set-target', - label, - icon: , - items: [ - { - key: '__all__', - label: 'All', - selected: isAllSelected, - onSelect: () => onSelect(null), - }, - ...targets.map((t) => ({ - key: t.name, - label: t.name, - selected: isUniform && currentTargets?.length === 1 && currentTargets[0] === t.name, - onSelect: () => onSelect(t.name), - })), - ], - }; + const items: ContextMenuItem[] = []; + if (showTargets) { + items.push({ + key: 'set-target', + label, + icon: , + items: [ + { + key: '__all__', + label: 'All', + selected: isAllSelected, + onSelect: () => onSelect(null), + }, + ...targets.map((t) => ({ + key: t.name, + label: t.name, + selected: isUniform && currentTargets?.length === 1 && currentTargets[0] === t.name, + onSelect: () => onSelect(t.name), + })), + ], + }); + } + items.push(...(extraItems ?? [])); + + if (items.length === 0) return null; return ( -

- Source Directory +

+ Source Directories

-

- {data.source} -

-

- This is where your skills live. All targets sync from here. +

+ + {data.agentsSource && } + {data.extrasSource && } +
+

+ All targets sync from these directories.

@@ -411,6 +408,15 @@ export default function DashboardPage() { ); } +function SourceRow({ label, path }: { label: string; path: string }) { + return ( +
+ {label} + {path} +
+ ); +} + /* -- Tracked Repositories Section --------------------- */ function TrackedReposSection({ repos }: { repos: { name: string; skillCount: number; dirty: boolean }[] }) { diff --git a/ui/src/pages/FilterStudioPage.tsx b/ui/src/pages/FilterStudioPage.tsx index 5b2888d3..6401c6d2 100644 --- a/ui/src/pages/FilterStudioPage.tsx +++ b/ui/src/pages/FilterStudioPage.tsx @@ -1,5 +1,5 @@ import { useState, useEffect, useCallback, useRef, useMemo, memo } from 'react'; -import { useParams, useNavigate } from 'react-router-dom'; +import { useParams, useNavigate, useSearchParams } from 'react-router-dom'; import { useQuery, useQueryClient } from '@tanstack/react-query'; import { Virtuoso } from 'react-virtuoso'; import { Filter, Check, X, Info, PackageOpen, Search } from 'lucide-react'; @@ -13,14 +13,21 @@ import Spinner from '../components/Spinner'; import PageHeader from '../components/PageHeader'; import EmptyState from '../components/EmptyState'; import FilterTagInput from '../components/FilterTagInput'; +import KindBadge from '../components/KindBadge'; import { radius } from '../design'; +type FilterKind = 'skill' | 'agent'; + export default function FilterStudioPage() { const { name } = useParams<{ name: string }>(); + const [searchParams] = useSearchParams(); const navigate = useNavigate(); const queryClient = useQueryClient(); const { toast } = useToast(); + const kind: FilterKind = searchParams.get('kind') === 'agent' ? 'agent' : 'skill'; + const kindLabel = kind === 'agent' ? 'agents' : 'skills'; + // Load current target config const targetsQuery = useQuery({ queryKey: queryKeys.targets.all, @@ -33,7 +40,7 @@ export default function FilterStudioPage() { [targetsQuery.data, name], ); - // Draft filter state + // Draft filter state for active kind const [include, setInclude] = useState([]); const [exclude, setExclude] = useState([]); const [initialized, setInitialized] = useState(false); @@ -41,11 +48,16 @@ export default function FilterStudioPage() { // Initialize draft from target config once loaded useEffect(() => { if (target && !initialized) { - setInclude(target.include ?? []); - setExclude(target.exclude ?? []); + if (kind === 'agent') { + setInclude(target.agentInclude ?? []); + setExclude(target.agentExclude ?? []); + } else { + setInclude(target.include ?? []); + setExclude(target.exclude ?? []); + } setInitialized(true); } - }, [target, initialized]); + }, [target, initialized, kind]); // Debounced preview const [preview, setPreview] = useState([]); @@ -57,7 +69,11 @@ export default function FilterStudioPage() { if (!name) return; setPreviewLoading(true); try { - const res = await api.previewSyncMatrix(name, inc, exc); + const skillInc = kind === 'skill' ? inc : []; + const skillExc = kind === 'skill' ? exc : []; + const agentInc = kind === 'agent' ? inc : []; + const agentExc = kind === 'agent' ? exc : []; + const res = await api.previewSyncMatrix(name, skillInc, skillExc, agentInc, agentExc); setPreview(res.entries); } catch { // silently ignore preview errors @@ -65,7 +81,7 @@ export default function FilterStudioPage() { setPreviewLoading(false); } }, - [name], + [name, kind], ); // Trigger debounced preview on filter change @@ -76,16 +92,22 @@ export default function FilterStudioPage() { return () => clearTimeout(debounceRef.current); }, [include, exclude, initialized, fetchPreview]); + // Filter preview entries to only show the active kind + const kindPreview = useMemo(() => { + if (kind === 'agent') return preview.filter((e) => e.kind === 'agent'); + return preview.filter((e) => e.kind !== 'agent'); + }, [preview, kind]); + // Unsaved changes detection const hasChanges = useMemo(() => { if (!target) return false; - const savedInc = target.include ?? []; - const savedExc = target.exclude ?? []; + const savedInc = kind === 'agent' ? (target.agentInclude ?? []) : (target.include ?? []); + const savedExc = kind === 'agent' ? (target.agentExclude ?? []) : (target.exclude ?? []); return ( JSON.stringify(include) !== JSON.stringify(savedInc) || JSON.stringify(exclude) !== JSON.stringify(savedExc) ); - }, [target, include, exclude]); + }, [target, include, exclude, kind]); // Save handler const [saving, setSaving] = useState(false); @@ -94,8 +116,11 @@ export default function FilterStudioPage() { if (!name) return; setSaving(true); try { - await api.updateTarget(name, { include, exclude }); - toast(`Filters for "${name}" saved.`, 'success'); + const payload = kind === 'agent' + ? { agent_include: include, agent_exclude: exclude } + : { include, exclude }; + await api.updateTarget(name, payload); + toast(`${kind === 'agent' ? 'Agent' : 'Skill'} filters for "${name}" saved.`, 'success'); queryClient.invalidateQueries({ queryKey: queryKeys.targets.all }); queryClient.invalidateQueries({ queryKey: queryKeys.syncMatrix() }); if (goBack) navigate('/targets'); @@ -107,33 +132,31 @@ export default function FilterStudioPage() { }; // Click-to-toggle on preview items - const handleToggleSkill = (entry: SyncMatrixEntry) => { + const handleToggle = (entry: SyncMatrixEntry) => { if (entry.status === 'skill_target_mismatch') return; - const skill = entry.skill; + const item = entry.skill; if (entry.status === 'synced') { - // Exclude this skill: add to exclude, remove from include - setExclude((prev) => prev.includes(skill) ? prev : [...prev, skill]); - setInclude((prev) => prev.filter((p) => p !== skill)); + setExclude((prev) => prev.includes(item) ? prev : [...prev, item]); + setInclude((prev) => prev.filter((p) => p !== item)); } else { - // Include this skill: add to include, remove from exclude - setInclude((prev) => prev.includes(skill) ? prev : [...prev, skill]); - setExclude((prev) => prev.filter((p) => p !== skill)); + setInclude((prev) => prev.includes(item) ? prev : [...prev, item]); + setExclude((prev) => prev.filter((p) => p !== item)); } }; // Preview search filter const [previewSearch, setPreviewSearch] = useState(''); const filteredPreview = useMemo(() => { - if (!previewSearch) return preview; + if (!previewSearch) return kindPreview; const q = previewSearch.toLowerCase(); - return preview.filter((e) => e.skill.toLowerCase().includes(q)); - }, [preview, previewSearch]); + return kindPreview.filter((e) => e.skill.toLowerCase().includes(q)); + }, [kindPreview, previewSearch]); - // Summary counts (always from full preview, not filtered) + // Summary counts (from kind-filtered preview, not search-filtered) const { syncedCount, totalCount } = useMemo(() => ({ - syncedCount: preview.filter((e) => e.status === 'synced').length, - totalCount: preview.length, - }), [preview]); + syncedCount: kindPreview.filter((e) => e.status === 'synced').length, + totalCount: kindPreview.length, + }), [kindPreview]); if (targetsQuery.isPending) { return ( @@ -165,7 +188,12 @@ export default function FilterStudioPage() { } title="Filter Studio" - subtitle={`Route specific skills to ${name}. Use glob patterns like frontend*, _team__*.`} + subtitle={ + + + Route specific {kindLabel} to {name} + + } backTo="/targets" actions={ <> @@ -201,7 +229,9 @@ export default function FilterStudioPage() {
{/* Left column — Filter Rules */} -

Filter Rules

+

+ {kind === 'agent' ? 'Agent' : 'Skill'} Filter Rules +

}
- {preview.length === 0 && !previewLoading ? ( + {kindPreview.length === 0 && !previewLoading ? ( ) : ( <> @@ -243,7 +273,7 @@ export default function FilterStudioPage() { type="text" value={previewSearch} onChange={(e) => setPreviewSearch(e.target.value)} - placeholder="Filter skills..." + placeholder={`Filter ${kindLabel}...`} className="w-full pl-8 pr-3 py-1.5 text-sm text-pencil bg-surface border-2 border-muted font-mono placeholder:text-muted-dark focus:border-pencil focus:outline-none" style={{ borderRadius: radius.sm }} /> @@ -255,7 +285,7 @@ export default function FilterStudioPage() { > {filteredPreview.length === 0 && previewSearch ? (

- No skills matching "{previewSearch}" + No {kindLabel} matching “{previewSearch}”

) : ( ( handleToggleSkill(filteredPreview[index])} + kind={kind} + onClick={() => handleToggle(filteredPreview[index])} /> )} /> @@ -274,7 +305,7 @@ export default function FilterStudioPage() {

{syncedCount} - /{totalCount} skills will sync + /{totalCount} {kindLabel} will sync {previewSearch && ` · showing ${filteredPreview.length}`}

@@ -288,13 +319,16 @@ export default function FilterStudioPage() { /** Single preview row with status indicator and click-to-toggle */ const PreviewRow = memo(function PreviewRow({ entry, + kind, onClick, }: { entry: SyncMatrixEntry; + kind: FilterKind; onClick: () => void; }) { const isMismatch = entry.status === 'skill_target_mismatch'; const clickable = !isMismatch; + const label = kind === 'agent' ? 'agent' : 'skill'; return (
diff --git a/ui/src/pages/ResourceDetailPage.tsx b/ui/src/pages/ResourceDetailPage.tsx index f45c1657..483abaa8 100644 --- a/ui/src/pages/ResourceDetailPage.tsx +++ b/ui/src/pages/ResourceDetailPage.tsx @@ -1,4 +1,4 @@ -import { useParams, useNavigate, Link } from 'react-router-dom'; +import { useParams, useNavigate, Link, useSearchParams } from 'react-router-dom'; import { ArrowLeft, Trash2, ExternalLink, FileText, ArrowUpRight, RefreshCw, Target, Type, AlignLeft, Files, Scale, Zap, @@ -97,9 +97,10 @@ function parseSkillMarkdown(content: string): { manifest: SkillManifest; markdow } function skillTypeLabel(type?: string): string { - if (!type) return 'local'; - if (type === 'github-subdir') return 'github'; - return type; + if (!type) return 'Local'; + if (type === 'github-subdir') return 'GitHub'; + if (type === 'github') return 'GitHub'; + return type.charAt(0).toUpperCase() + type.slice(1); } /** Returns a lucide icon component + color class for a filename */ @@ -155,11 +156,17 @@ function ContentStatsBar({ content, description, body, fileCount, license }: { c export default function SkillDetailPage() { const { name } = useParams<{ name: string }>(); + const [searchParams] = useSearchParams(); const navigate = useNavigate(); const queryClient = useQueryClient(); + const requestedKind = searchParams.get('kind') === 'agent' + ? 'agent' + : searchParams.get('kind') === 'skill' + ? 'skill' + : undefined; const { data, isPending, error } = useQuery({ - queryKey: queryKeys.skills.detail(name!), - queryFn: () => api.getSkill(name!), + queryKey: [...queryKeys.skills.detail(name!), requestedKind], + queryFn: () => api.getResource(name!, requestedKind), staleTime: staleTimes.skills, enabled: !!name, }); @@ -205,7 +212,7 @@ export default function SkillDetailPage() { return (

- Failed to load skill + Failed to load resource

{error.message}

@@ -288,8 +295,8 @@ export default function SkillDetailPage() { await api.deleteRepo(repoName); toast(`Repository "${repoName}" uninstalled.`, 'success'); } else { - await api.deleteSkill(resource.flatName); - toast(`Skill "${resource.name}" uninstalled.`, 'success'); + await api.deleteResource(resource.flatName, resource.kind); + toast(`${resource.kind === 'agent' ? 'Agent' : 'Skill'} "${resource.name}" uninstalled.`, 'success'); } await queryClient.invalidateQueries({ queryKey: queryKeys.skills.all }); await queryClient.invalidateQueries({ queryKey: queryKeys.overview }); @@ -306,8 +313,12 @@ export default function SkillDetailPage() { setUpdating(true); setBlockedMessage(null); try { - const skillName = resource.isInRepo ? resource.relPath.split('/')[0] : resource.relPath; - const res = await api.update({ name: skillName, skipAudit }); + const resourceName = resource.isInRepo + ? resource.relPath.split('/')[0] + : resource.kind === 'agent' + ? resource.flatName + : resource.relPath; + const res = await api.update({ name: resourceName, kind: resource.kind, skipAudit }); const item = res.results[0]; if (item?.action === 'updated') { const auditInfo = item.auditRiskLabel @@ -337,10 +348,10 @@ export default function SkillDetailPage() { setToggling(true); try { if (resource.disabled) { - await api.enableSkill(resource.flatName); + await api.enableResource(resource.flatName, resource.kind); toast(`Enabled: ${resource.name}`, 'success'); } else { - await api.disableSkill(resource.flatName); + await api.disableResource(resource.flatName, resource.kind); toast(`Disabled: ${resource.name}`, 'success'); } await queryClient.invalidateQueries({ queryKey: queryKeys.skills.detail(name!) }); @@ -359,7 +370,7 @@ export default function SkillDetailPage() {
} - label="Back to skills" + label="Back to resources" size="lg" variant="outline" onClick={() => navigate('/resources')} @@ -433,7 +444,7 @@ export default function SkillDetailPage() { ) : (

- No SKILL.md content available. + No content available.

)}
@@ -596,7 +607,7 @@ export default function SkillDetailPage() { {/* Target Distribution */} - + {/* Target Sync Status */} @@ -638,11 +649,11 @@ export default function SkillDetailPage() { {/* Confirm uninstall dialog */} - {e.target} diff --git a/ui/src/pages/ResourcesPage.tsx b/ui/src/pages/ResourcesPage.tsx index 760ad599..cc1c897b 100644 --- a/ui/src/pages/ResourcesPage.tsx +++ b/ui/src/pages/ResourcesPage.tsx @@ -43,7 +43,7 @@ import PageHeader from '../components/PageHeader'; import SegmentedControl from '../components/SegmentedControl'; import Pagination from '../components/Pagination'; import { api } from '../api/client'; -import type { Skill } from '../api/client'; +import type { Skill, SyncMatrixEntry } from '../api/client'; import { radius } from '../design'; import ScrollToTop from '../components/ScrollToTop'; import Tooltip from '../components/Tooltip'; @@ -52,6 +52,7 @@ import { useToast } from '../components/Toast'; import TargetMenu, { SkillContextMenu, type ContextMenuItem } from '../components/TargetMenu'; import ConfirmDialog from '../components/ConfirmDialog'; import Spinner from '../components/Spinner'; +import { useSyncMatrix } from '../hooks/useSyncMatrix'; /* -- Sticky-note pastel palette (8 colors) --------- */ @@ -69,6 +70,43 @@ const SKILL_PASTELS_DARK = [ /* -- Shared skill action items hook --------------- */ type SkillsData = { resources: Skill[] }; +const EMPTY_RESOURCES: Skill[] = []; + +function resourceDetailHref(resource: Pick): string { + const kindQuery = resource.kind === 'agent' ? '?kind=agent' : ''; + return `/resources/${encodeURIComponent(resource.flatName)}${kindQuery}`; +} + +function resourceLabel(kind: Skill['kind'], capitalize = false): string { + const label = kind === 'agent' ? 'agent' : 'skill'; + return capitalize ? label[0].toUpperCase() + label.slice(1) : label; +} + +function summarizeAgentTargets(entries: SyncMatrixEntry[]): { label: string; title: string } { + if (entries.length === 0) { + return { + label: 'No agent targets', + title: 'No configured targets currently support agent sync.', + }; + } + + const synced = entries + .filter((entry) => entry.status === 'synced') + .map((entry) => entry.target) + .sort(); + + if (synced.length === 0) { + return { + label: 'Filtered out', + title: 'This agent is excluded by the current target agent filters.', + }; + } + + return { + label: synced.length > 2 ? `${synced.length} targets` : synced.join(', '), + title: synced.join(', '), + }; +} /** Optimistic update helper: patch skills cache and return rollback snapshot. */ function optimisticPatch( @@ -86,22 +124,22 @@ function optimisticPatch( return previous; } -function useSkillActions() { +function useResourceActions() { const queryClient = useQueryClient(); const { toast } = useToast(); const navigate = useNavigate(); const toggleMutation = useMutation({ - mutationFn: ({ name, disable }: { name: string; disable: boolean }) => - disable ? api.disableSkill(name) : api.enableSkill(name), - onMutate: async ({ name, disable }) => { + mutationFn: ({ name, kind, disable }: { name: string; kind: Skill['kind']; disable: boolean }) => + disable ? api.disableResource(name, kind) : api.enableResource(name, kind), + onMutate: async ({ name, kind, disable }) => { const previous = optimisticPatch(queryClient, (skills) => - skills.map((s) => s.flatName === name ? { ...s, disabled: disable } : s), + skills.map((s) => s.flatName === name && s.kind === kind ? { ...s, disabled: disable } : s), ); return { previous }; }, - onSuccess: (_, { name, disable }) => { - toast(`${name} ${disable ? 'disabled' : 'enabled'}`, 'success'); + onSuccess: (_, { name, kind, disable }) => { + toast(`${resourceLabel(kind, true)} ${name} ${disable ? 'disabled' : 'enabled'}`, 'success'); }, onError: (err: Error, _, ctx) => { if (ctx?.previous) queryClient.setQueryData(queryKeys.skills.all, ctx.previous); @@ -111,15 +149,15 @@ function useSkillActions() { }); const uninstallMutation = useMutation({ - mutationFn: (name: string) => api.deleteSkill(name), - onMutate: async (name) => { + mutationFn: ({ name, kind }: { name: string; kind: Skill['kind'] }) => api.deleteResource(name, kind), + onMutate: async ({ name, kind }) => { const previous = optimisticPatch(queryClient, (skills) => - skills.filter((s) => s.flatName !== name), + skills.filter((s) => !(s.flatName === name && s.kind === kind)), ); return { previous }; }, - onSuccess: (_, name) => { - toast(`Uninstalled ${name}`, 'success'); + onSuccess: (_, { name, kind }) => { + toast(`Uninstalled ${resourceLabel(kind)} ${name}`, 'success'); }, onError: (err: Error, _, ctx) => { if (ctx?.previous) queryClient.setQueryData(queryKeys.skills.all, ctx.previous); @@ -171,8 +209,8 @@ function useSkillActions() { }); /** Build extra context menu items for a single skill. */ - function buildSkillExtraItems( - skill: Pick, + function buildResourceExtraItems( + skill: Pick, onUninstall: () => void, onUninstallRepo: (repoName: string) => void, ): ContextMenuItem[] { @@ -181,7 +219,7 @@ function useSkillActions() { key: 'detail', label: 'View Detail', icon: , - onSelect: () => navigate(`/resources/${encodeURIComponent(skill.flatName)}`), + onSelect: () => navigate(resourceDetailHref(skill)), }, { key: 'toggle', @@ -189,10 +227,10 @@ function useSkillActions() { icon: skill.disabled ? : , - onSelect: () => toggleMutation.mutate({ name: skill.flatName, disable: !skill.disabled }), + onSelect: () => toggleMutation.mutate({ name: skill.flatName, kind: skill.kind, disable: !skill.disabled }), }, ]; - if (skill.isInRepo) { + if (skill.kind === 'skill' && skill.isInRepo) { items.push({ key: 'uninstall-repo', label: 'Uninstall Repo', @@ -210,7 +248,7 @@ function useSkillActions() { return items; } - return { uninstallMutation, uninstallRepoMutation, setTargetMutation, buildSkillExtraItems }; + return { uninstallMutation, uninstallRepoMutation, setTargetMutation, buildResourceExtraItems }; } /** Normalize skill targets: ["*"] or empty/null → [] (meaning All). */ @@ -585,7 +623,7 @@ const SkillPostit = memo(function SkillPostit({ return ( @@ -674,7 +712,7 @@ function ContextMenuTip() { >

- Right-click any skill or folder for quick actions — set target, enable/disable, uninstall, and more. + Right-click any resource or folder for quick actions — enable/disable, uninstall, and more.

- } + ) : undefined} /> {/* Resource type underline tabs */} @@ -959,6 +1013,7 @@ export default function SkillsPage() { point: { x: e.clientX, y: e.clientY }, skillFlatName: skill.flatName, skillName: skill.name, + kind: skill.kind, relPath: skill.relPath, disabled: !!skill.disabled, isInRepo: !!skill.isInRepo, @@ -972,13 +1027,14 @@ export default function SkillsPage() { ) : viewType === 'grouped' ? ( { setFilterType('all'); setSearch(''); } : undefined} /> ) : ( - + ) ) : ( setGridConfirmUninstall({ flatName: gridContextMenu.skillFlatName, name: gridContextMenu.skillName }), + showTargets={gridContextMenu.kind !== 'agent'} + extraItems={buildResourceExtraItems( + { + flatName: gridContextMenu.skillFlatName, + name: gridContextMenu.skillName, + relPath: gridContextMenu.relPath, + disabled: gridContextMenu.disabled, + isInRepo: gridContextMenu.isInRepo, + kind: gridContextMenu.kind, + }, + () => setGridConfirmUninstall({ flatName: gridContextMenu.skillFlatName, name: gridContextMenu.skillName, kind: gridContextMenu.kind }), (repoName) => { setGridConfirmUninstallRepo(repoName); setGridContextMenu(null); }, )} onSelect={(target) => { @@ -1014,13 +1078,13 @@ export default function SkillsPage() { )} Are you sure you want to uninstall {gridConfirmUninstall?.name}?} confirmText="Uninstall" variant="danger" loading={gridUninstallMutation.isPending} onConfirm={() => { - if (gridConfirmUninstall) gridUninstallMutation.mutate(gridConfirmUninstall.flatName); + if (gridConfirmUninstall) gridUninstallMutation.mutate({ name: gridConfirmUninstall.flatName, kind: gridConfirmUninstall.kind }); setGridConfirmUninstall(null); }} onCancel={() => setGridConfirmUninstall(null)} @@ -1047,8 +1111,9 @@ export default function SkillsPage() { const INDENT_PX = 24; -function FolderTreeView({ skills, totalCount, isSearching, stickyTop = 0, onClearFilters }: { +function FolderTreeView({ skills, resourceKind, totalCount, isSearching, stickyTop = 0, onClearFilters }: { skills: Skill[]; + resourceKind: Skill['kind']; totalCount: number; isSearching: boolean; stickyTop?: number; @@ -1062,6 +1127,7 @@ function FolderTreeView({ skills, totalCount, isSearching, stickyTop = 0, onClea folderPath?: string; skillFlatName?: string; skillName?: string; + kind?: Skill['kind']; relPath?: string; disabled?: boolean; isInRepo?: boolean; @@ -1071,7 +1137,12 @@ function FolderTreeView({ skills, totalCount, isSearching, stickyTop = 0, onClea const queryClient = useQueryClient(); const { toast } = useToast(); - const { uninstallMutation, uninstallRepoMutation, setTargetMutation: singleMutation, buildSkillExtraItems: buildExtraItems } = useSkillActions(); + const { + uninstallMutation, + uninstallRepoMutation, + setTargetMutation: singleMutation, + buildResourceExtraItems: buildExtraItems, + } = useResourceActions(); const [confirmUninstallRepo, setConfirmUninstallRepo] = useState(null); const batchMutation = useMutation({ @@ -1116,6 +1187,7 @@ function FolderTreeView({ skills, totalCount, isSearching, stickyTop = 0, onClea const [confirmUninstall, setConfirmUninstall] = useState<{ flatName: string; name: string; + kind: Skill['kind']; } | null>(null); const tree = useMemo(() => buildTree(skills), [skills]); @@ -1218,7 +1290,7 @@ function FolderTreeView({ skills, totalCount, isSearching, stickyTop = 0, onClea onClick={() => toggleFolder(node.path)} onContextMenu={(e) => { e.preventDefault(); - if (batchMutation.isPending) return; + if (resourceKind === 'agent' || batchMutation.isPending) return; setContextMenu({ point: { x: e.clientX, y: e.clientY }, mode: 'folder', @@ -1248,7 +1320,7 @@ function FolderTreeView({ skills, totalCount, isSearching, stickyTop = 0, onClea > {node.childCount} - {node.targetSummary && ( + {resourceKind === 'skill' && node.targetSummary && ( {pendingFolder === node.path && } @@ -1331,18 +1404,20 @@ function FolderTreeView({ skills, totalCount, isSearching, stickyTop = 0, onClea {skill.branch} )} - - - - {skillTargetLabel} - - + {resourceKind === 'skill' && ( + + + + {skillTargetLabel} + + + )}
); - }, [rows, collapsed, isSearching, toggleFolder, setContextMenu, contextMenu, pendingFolder]); + }, [rows, collapsed, isSearching, toggleFolder, contextMenu, pendingFolder, resourceKind, batchMutation.isPending]); return (
@@ -1351,7 +1426,7 @@ function FolderTreeView({ skills, totalCount, isSearching, stickyTop = 0, onClea {isSearching ? ( <> - Showing {skills.length} of {totalCount} skills + Showing {skills.length} of {totalCount} {resourceKind === 'agent' ? 'agents' : 'skills'} {onClearFilters && ( <> {' '}·{' '} @@ -1404,7 +1479,7 @@ function FolderTreeView({ skills, totalCount, isSearching, stickyTop = 0, onClea > {stickyFolder.node.childCount} - {stickyFolder.node.targetSummary && ( + {resourceKind === 'skill' && stickyFolder.node.targetSummary && ( 0 @@ -1436,10 +1511,22 @@ function FolderTreeView({ skills, totalCount, isSearching, stickyTop = 0, onClea anchorPoint={contextMenu.point} currentTargets={contextMenu.currentTargets} isUniform={contextMenu.isUniform} + showTargets={resourceKind === 'skill'} label={contextMenu.mode === 'folder' ? 'Folder available in...' : 'Available in...'} extraItems={contextMenu.mode === 'skill' ? buildExtraItems( - { flatName: contextMenu.skillFlatName!, name: contextMenu.skillName ?? contextMenu.skillFlatName!, relPath: contextMenu.relPath ?? '', disabled: !!contextMenu.disabled, isInRepo: !!contextMenu.isInRepo }, - () => setConfirmUninstall({ flatName: contextMenu.skillFlatName!, name: contextMenu.skillName ?? contextMenu.skillFlatName! }), + { + flatName: contextMenu.skillFlatName!, + name: contextMenu.skillName ?? contextMenu.skillFlatName!, + relPath: contextMenu.relPath ?? '', + disabled: !!contextMenu.disabled, + isInRepo: !!contextMenu.isInRepo, + kind: contextMenu.kind ?? resourceKind, + }, + () => setConfirmUninstall({ + flatName: contextMenu.skillFlatName!, + name: contextMenu.skillName ?? contextMenu.skillFlatName!, + kind: contextMenu.kind ?? resourceKind, + }), (repoName) => { setConfirmUninstallRepo(repoName); setContextMenu(null); }, ) : undefined} onSelect={(target) => { @@ -1456,13 +1543,13 @@ function FolderTreeView({ skills, totalCount, isSearching, stickyTop = 0, onClea )} Are you sure you want to uninstall {confirmUninstall?.name}?} confirmText="Uninstall" variant="danger" loading={uninstallMutation.isPending} onConfirm={() => { - if (confirmUninstall) uninstallMutation.mutate(confirmUninstall.flatName); + if (confirmUninstall) uninstallMutation.mutate({ name: confirmUninstall.flatName, kind: confirmUninstall.kind }); setConfirmUninstall(null); }} onCancel={() => setConfirmUninstall(null)} @@ -1488,12 +1575,12 @@ function FolderTreeView({ skills, totalCount, isSearching, stickyTop = 0, onClea const TABLE_PAGE_SIZES = [10, 25, 50] as const; -function SkillsTable({ skills }: { skills: Skill[] }) { +function SkillsTable({ skills, resourceKind }: { skills: Skill[]; resourceKind: Skill['kind'] }) { const [page, setPage] = useState(0); const [pageSize, setPageSize] = useState(() => { const saved = localStorage.getItem('skillshare:table-page-size'); const n = saved ? parseInt(saved, 10) : 0; - return TABLE_PAGE_SIZES.includes(n as any) ? n : 10; + return TABLE_PAGE_SIZES.some((size) => size === n) ? n : 10; }); const [prevSkills, setPrevSkills] = useState(skills); if (skills !== prevSkills) { @@ -1505,6 +1592,7 @@ function SkillsTable({ skills }: { skills: Skill[] }) { point: { x: number; y: number }; skillFlatName: string; skillName: string; + kind: Skill['kind']; relPath: string; disabled: boolean; isInRepo: boolean; @@ -1512,10 +1600,17 @@ function SkillsTable({ skills }: { skills: Skill[] }) { const [confirmUninstall, setConfirmUninstall] = useState<{ flatName: string; name: string; + kind: Skill['kind']; } | null>(null); - const { uninstallMutation, uninstallRepoMutation: tableUninstallRepoMutation, setTargetMutation: targetMutation, buildSkillExtraItems: buildTableExtraItems } = useSkillActions(); + const { + uninstallMutation, + uninstallRepoMutation: tableUninstallRepoMutation, + setTargetMutation: targetMutation, + buildResourceExtraItems: buildTableExtraItems, + } = useResourceActions(); const [tableConfirmUninstallRepo, setTableConfirmUninstallRepo] = useState(null); + const { getSkillTargets } = useSyncMatrix(); // Available targets for the inline Select const { data: availableData } = useQuery({ @@ -1540,8 +1635,15 @@ function SkillsTable({ skills }: { skills: Skill[] }) { // Build action menu items const actionItems: ContextMenuItem[] = actionMenu ? buildTableExtraItems( - { flatName: actionMenu.skillFlatName, name: actionMenu.skillName, relPath: actionMenu.relPath, disabled: actionMenu.disabled, isInRepo: actionMenu.isInRepo }, - () => setConfirmUninstall({ flatName: actionMenu.skillFlatName, name: actionMenu.skillName }), + { + flatName: actionMenu.skillFlatName, + name: actionMenu.skillName, + relPath: actionMenu.relPath, + disabled: actionMenu.disabled, + isInRepo: actionMenu.isInRepo, + kind: actionMenu.kind, + }, + () => setConfirmUninstall({ flatName: actionMenu.skillFlatName, name: actionMenu.skillName, kind: actionMenu.kind }), (repoName) => { setTableConfirmUninstallRepo(repoName); setActionMenu(null); }, ) : []; @@ -1555,7 +1657,9 @@ function SkillsTable({ skills }: { skills: Skill[] }) { Name Type - Available in + + {resourceKind === 'agent' ? 'Synced to' : 'Available in'} + @@ -1563,6 +1667,7 @@ function SkillsTable({ skills }: { skills: Skill[] }) { {visible.map((skill) => { const currentValue = skill.targets?.length === 1 ? skill.targets[0] : '__all__'; const showPath = skill.relPath !== skill.name; + const agentTargets = summarizeAgentTargets(getSkillTargets(skill.flatName)); return (
{skill.name} @@ -1631,18 +1736,26 @@ function SkillsTable({ skills }: { skills: Skill[] }) { {/* Available in — inline Select */} e.stopPropagation()}> - { + targetMutation.mutate({ + name: skill.flatName, + target: val === '__all__' ? null : val, + }); + }} + options={targetOptions} + size="sm" + className="min-w-[7rem] max-w-[9rem]" + /> + )} {/* Actions ⋯ */} @@ -1656,6 +1769,7 @@ function SkillsTable({ skills }: { skills: Skill[] }) { point: { x: rect.right, y: rect.bottom }, skillFlatName: skill.flatName, skillName: skill.name, + kind: skill.kind, relPath: skill.relPath, disabled: !!skill.disabled, isInRepo: !!skill.isInRepo, @@ -1697,13 +1811,13 @@ function SkillsTable({ skills }: { skills: Skill[] }) { )} Are you sure you want to uninstall {confirmUninstall?.name}?} confirmText="Uninstall" variant="danger" loading={uninstallMutation.isPending} onConfirm={() => { - if (confirmUninstall) uninstallMutation.mutate(confirmUninstall.flatName); + if (confirmUninstall) uninstallMutation.mutate({ name: confirmUninstall.flatName, kind: confirmUninstall.kind }); setConfirmUninstall(null); }} onCancel={() => setConfirmUninstall(null)} diff --git a/ui/src/pages/TargetsPage.tsx b/ui/src/pages/TargetsPage.tsx index 7fbfa0fd..6f310080 100644 --- a/ui/src/pages/TargetsPage.tsx +++ b/ui/src/pages/TargetsPage.tsx @@ -550,7 +550,7 @@ export default function TargetsPage() { ); })()} {(target.include?.length || target.exclude?.length) ? 'Edit in Filter Studio →' : 'Customize filters →'} @@ -634,7 +634,7 @@ export default function TargetsPage() { +{overflowAgentFilters} more )} {agentFilters ? 'Edit in Filter Studio →' : 'Customize filters →'} From 2a91a72f6e78fa867cb3e19028a164ac931727ec Mon Sep 17 00:00:00 2001 From: Willie Date: Thu, 9 Apr 2026 04:07:35 +0800 Subject: [PATCH 126/205] fix(server): sync matrix agent handling and stale test routes MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Fix sync matrix using wrong builtin agent targets in project mode (DefaultAgentTargets → branch on IsProjectMode) - Fix sync matrix not skipping filters for agent symlink mode, matching the existing skill symlink behavior - Remove double-defensive slice copies in handleListTargets since targetsummary.Builder already clones Include/Exclude - Extract shared findAgent() to deduplicate discovery logic between resolveAgentResource and resolveAgentRelPathWithStatus - Update test URLs from /api/skills to /api/resources and response keys from skills/skill to resources/resource to match the route rename from the resources refactor --- internal/server/handler_create_skill_test.go | 14 ++-- internal/server/handler_skills_batch_test.go | 16 ++-- internal/server/handler_skills_test.go | 28 +++---- internal/server/handler_sync_matrix.go | 84 +++++++++++++++----- internal/server/handler_targets.go | 4 +- internal/server/resource_agents.go | 17 ++-- 6 files changed, 102 insertions(+), 61 deletions(-) diff --git a/internal/server/handler_create_skill_test.go b/internal/server/handler_create_skill_test.go index 4c30dab5..ecb5ea0e 100644 --- a/internal/server/handler_create_skill_test.go +++ b/internal/server/handler_create_skill_test.go @@ -15,7 +15,7 @@ import ( func TestHandleGetTemplates(t *testing.T) { s, _ := newTestServer(t) - req := httptest.NewRequest(http.MethodGet, "/api/skills/templates", nil) + req := httptest.NewRequest(http.MethodGet, "/api/resources/templates", nil) rr := httptest.NewRecorder() s.handler.ServeHTTP(rr, req) @@ -51,7 +51,7 @@ func TestHandleCreateSkill_Success(t *testing.T) { s, src := newTestServer(t) body := `{"name":"my-tool","pattern":"tool-wrapper","category":"library","scaffoldDirs":["references"]}` - req := httptest.NewRequest(http.MethodPost, "/api/skills", bytes.NewBufferString(body)) + req := httptest.NewRequest(http.MethodPost, "/api/resources", bytes.NewBufferString(body)) rr := httptest.NewRecorder() s.handler.ServeHTTP(rr, req) @@ -118,7 +118,7 @@ func TestHandleCreateSkill_EmptyScaffoldDirs(t *testing.T) { s, src := newTestServer(t) body := `{"name":"simple-skill","pattern":"tool-wrapper","category":"library","scaffoldDirs":[]}` - req := httptest.NewRequest(http.MethodPost, "/api/skills", bytes.NewBufferString(body)) + req := httptest.NewRequest(http.MethodPost, "/api/resources", bytes.NewBufferString(body)) rr := httptest.NewRecorder() s.handler.ServeHTTP(rr, req) @@ -162,7 +162,7 @@ func TestHandleCreateSkill_InvalidName(t *testing.T) { for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { - req := httptest.NewRequest(http.MethodPost, "/api/skills", bytes.NewBufferString(tc.body)) + req := httptest.NewRequest(http.MethodPost, "/api/resources", bytes.NewBufferString(tc.body)) rr := httptest.NewRecorder() s.handler.ServeHTTP(rr, req) @@ -184,7 +184,7 @@ func TestHandleCreateSkill_Duplicate(t *testing.T) { addSkill(t, src, "existing-skill") body := `{"name":"existing-skill","pattern":"none","category":"","scaffoldDirs":[]}` - req := httptest.NewRequest(http.MethodPost, "/api/skills", bytes.NewBufferString(body)) + req := httptest.NewRequest(http.MethodPost, "/api/resources", bytes.NewBufferString(body)) rr := httptest.NewRecorder() s.handler.ServeHTTP(rr, req) @@ -204,7 +204,7 @@ func TestHandleCreateSkill_InvalidScaffoldDir(t *testing.T) { // tool-wrapper only allows "references", not "assets" body := `{"name":"bad-dirs","pattern":"tool-wrapper","category":"library","scaffoldDirs":["assets"]}` - req := httptest.NewRequest(http.MethodPost, "/api/skills", bytes.NewBufferString(body)) + req := httptest.NewRequest(http.MethodPost, "/api/resources", bytes.NewBufferString(body)) rr := httptest.NewRecorder() s.handler.ServeHTTP(rr, req) @@ -223,7 +223,7 @@ func TestHandleCreateSkill_NonePattern(t *testing.T) { s, src := newTestServer(t) body := `{"name":"plain-skill","pattern":"none","category":"","scaffoldDirs":[]}` - req := httptest.NewRequest(http.MethodPost, "/api/skills", bytes.NewBufferString(body)) + req := httptest.NewRequest(http.MethodPost, "/api/resources", bytes.NewBufferString(body)) rr := httptest.NewRecorder() s.handler.ServeHTTP(rr, req) diff --git a/internal/server/handler_skills_batch_test.go b/internal/server/handler_skills_batch_test.go index b8f07da2..40d57d6a 100644 --- a/internal/server/handler_skills_batch_test.go +++ b/internal/server/handler_skills_batch_test.go @@ -45,7 +45,7 @@ func TestHandleBatchSetTargets_SetSingleTarget(t *testing.T) { addSkillNested(t, src, "frontend/skill-b") body := `{"folder":"frontend","target":"claude"}` - req := httptest.NewRequest(http.MethodPost, "/api/skills/batch/targets", bytes.NewBufferString(body)) + req := httptest.NewRequest(http.MethodPost, "/api/resources/batch/targets", bytes.NewBufferString(body)) rr := httptest.NewRecorder() s.handler.ServeHTTP(rr, req) @@ -84,7 +84,7 @@ func TestHandleBatchSetTargets_RemoveTargets(t *testing.T) { // Set target="" to remove targets (root-level folder) body := `{"folder":"","target":""}` - req := httptest.NewRequest(http.MethodPost, "/api/skills/batch/targets", bytes.NewBufferString(body)) + req := httptest.NewRequest(http.MethodPost, "/api/resources/batch/targets", bytes.NewBufferString(body)) rr := httptest.NewRecorder() s.handler.ServeHTTP(rr, req) @@ -114,7 +114,7 @@ func TestHandleBatchSetTargets_PathTraversal(t *testing.T) { s, _ := newTestServer(t) body := `{"folder":"../../../etc","target":"claude"}` - req := httptest.NewRequest(http.MethodPost, "/api/skills/batch/targets", bytes.NewBufferString(body)) + req := httptest.NewRequest(http.MethodPost, "/api/resources/batch/targets", bytes.NewBufferString(body)) rr := httptest.NewRecorder() s.handler.ServeHTTP(rr, req) @@ -132,7 +132,7 @@ func TestHandleBatchSetTargets_EmptyFolder_RootOnly(t *testing.T) { addSkillNested(t, src, "nested-folder/deep-skill") body := `{"folder":"","target":"cursor"}` - req := httptest.NewRequest(http.MethodPost, "/api/skills/batch/targets", bytes.NewBufferString(body)) + req := httptest.NewRequest(http.MethodPost, "/api/resources/batch/targets", bytes.NewBufferString(body)) rr := httptest.NewRecorder() s.handler.ServeHTTP(rr, req) @@ -190,7 +190,7 @@ func TestHandleBatchSetTargets_FolderTrailingSlash(t *testing.T) { // Trailing slash should still match (server cleans the input) body := `{"folder":"frontend/","target":"claude"}` - req := httptest.NewRequest(http.MethodPost, "/api/skills/batch/targets", bytes.NewBufferString(body)) + req := httptest.NewRequest(http.MethodPost, "/api/resources/batch/targets", bytes.NewBufferString(body)) rr := httptest.NewRecorder() s.handler.ServeHTTP(rr, req) @@ -214,7 +214,7 @@ func TestHandleSetSkillTargets_SetTarget(t *testing.T) { addSkill(t, src, "my-skill") body := `{"target":"claude"}` - req := httptest.NewRequest(http.MethodPatch, "/api/skills/my-skill/targets", bytes.NewBufferString(body)) + req := httptest.NewRequest(http.MethodPatch, "/api/resources/my-skill/targets", bytes.NewBufferString(body)) req.SetPathValue("name", "my-skill") rr := httptest.NewRecorder() s.handleSetSkillTargets(rr, req) @@ -244,7 +244,7 @@ func TestHandleSetSkillTargets_RemoveTarget(t *testing.T) { addSkillWithTargets(t, src, "my-skill", []string{"claude", "cursor"}) body := `{"target":""}` - req := httptest.NewRequest(http.MethodPatch, "/api/skills/my-skill/targets", bytes.NewBufferString(body)) + req := httptest.NewRequest(http.MethodPatch, "/api/resources/my-skill/targets", bytes.NewBufferString(body)) req.SetPathValue("name", "my-skill") rr := httptest.NewRecorder() s.handleSetSkillTargets(rr, req) @@ -265,7 +265,7 @@ func TestHandleSetSkillTargets_NotFound(t *testing.T) { s, _ := newTestServer(t) body := `{"target":"claude"}` - req := httptest.NewRequest(http.MethodPatch, "/api/skills/nonexistent/targets", bytes.NewBufferString(body)) + req := httptest.NewRequest(http.MethodPatch, "/api/resources/nonexistent/targets", bytes.NewBufferString(body)) req.SetPathValue("name", "nonexistent") rr := httptest.NewRecorder() s.handleSetSkillTargets(rr, req) diff --git a/internal/server/handler_skills_test.go b/internal/server/handler_skills_test.go index fa4d2e37..44452941 100644 --- a/internal/server/handler_skills_test.go +++ b/internal/server/handler_skills_test.go @@ -15,7 +15,7 @@ import ( func TestHandleListSkills_Empty(t *testing.T) { s, _ := newTestServer(t) - req := httptest.NewRequest(http.MethodGet, "/api/skills", nil) + req := httptest.NewRequest(http.MethodGet, "/api/resources", nil) rr := httptest.NewRecorder() s.handler.ServeHTTP(rr, req) @@ -24,11 +24,11 @@ func TestHandleListSkills_Empty(t *testing.T) { } var resp struct { - Skills []any `json:"skills"` + Resources []any `json:"resources"` } json.Unmarshal(rr.Body.Bytes(), &resp) - if len(resp.Skills) != 0 { - t.Errorf("expected 0 skills, got %d", len(resp.Skills)) + if len(resp.Resources) != 0 { + t.Errorf("expected 0 resources, got %d", len(resp.Resources)) } } @@ -37,7 +37,7 @@ func TestHandleListSkills_WithSkills(t *testing.T) { addSkill(t, src, "alpha") addSkill(t, src, "beta") - req := httptest.NewRequest(http.MethodGet, "/api/skills", nil) + req := httptest.NewRequest(http.MethodGet, "/api/resources", nil) rr := httptest.NewRecorder() s.handler.ServeHTTP(rr, req) @@ -46,11 +46,11 @@ func TestHandleListSkills_WithSkills(t *testing.T) { } var resp struct { - Skills []map[string]any `json:"skills"` + Resources []map[string]any `json:"resources"` } json.Unmarshal(rr.Body.Bytes(), &resp) - if len(resp.Skills) != 2 { - t.Errorf("expected 2 skills, got %d", len(resp.Skills)) + if len(resp.Resources) != 2 { + t.Errorf("expected 2 resources, got %d", len(resp.Resources)) } } @@ -58,7 +58,7 @@ func TestHandleGetSkill_Found(t *testing.T) { s, src := newTestServer(t) addSkill(t, src, "my-skill") - req := httptest.NewRequest(http.MethodGet, "/api/skills/my-skill", nil) + req := httptest.NewRequest(http.MethodGet, "/api/resources/my-skill", nil) rr := httptest.NewRecorder() s.handler.ServeHTTP(rr, req) @@ -68,15 +68,15 @@ func TestHandleGetSkill_Found(t *testing.T) { var resp map[string]any json.Unmarshal(rr.Body.Bytes(), &resp) - skill := resp["skill"].(map[string]any) - if skill["flatName"] != "my-skill" { - t.Errorf("expected flatName 'my-skill', got %v", skill["flatName"]) + res := resp["resource"].(map[string]any) + if res["flatName"] != "my-skill" { + t.Errorf("expected flatName 'my-skill', got %v", res["flatName"]) } } func TestHandleGetSkill_NotFound(t *testing.T) { s, _ := newTestServer(t) - req := httptest.NewRequest(http.MethodGet, "/api/skills/nonexistent", nil) + req := httptest.NewRequest(http.MethodGet, "/api/resources/nonexistent", nil) rr := httptest.NewRecorder() s.handler.ServeHTTP(rr, req) @@ -93,7 +93,7 @@ func TestHandleGetSkillFile_PathTraversal(t *testing.T) { // to bypass mux and call the handler directly with a crafted PathValue. // Instead, test that a valid-looking but still-traversal path is rejected. // The handler checks strings.Contains(fp, ".."). - req := httptest.NewRequest(http.MethodGet, "/api/skills/my-skill/files/sub%2F..%2F..%2Fetc%2Fpasswd", nil) + req := httptest.NewRequest(http.MethodGet, "/api/resources/my-skill/files/sub%2F..%2F..%2Fetc%2Fpasswd", nil) rr := httptest.NewRecorder() s.mux.ServeHTTP(rr, req) diff --git a/internal/server/handler_sync_matrix.go b/internal/server/handler_sync_matrix.go index d1ef390d..769ec44e 100644 --- a/internal/server/handler_sync_matrix.go +++ b/internal/server/handler_sync_matrix.go @@ -36,7 +36,12 @@ func (s *Server) handleSyncMatrix(w http.ResponseWriter, r *http.Request) { discovered, _ := resource.AgentKind{}.Discover(agentsSource) agents = resource.ActiveAgents(discovered) } - builtinAgents := config.DefaultAgentTargets() + var builtinAgents map[string]config.TargetConfig + if s.IsProjectMode() { + builtinAgents = config.ProjectAgentTargets() + } else { + builtinAgents = config.DefaultAgentTargets() + } targetFilter := r.URL.Query().Get("target") @@ -78,15 +83,31 @@ func (s *Server) handleSyncMatrix(w http.ResponseWriter, r *http.Request) { if agentPath == "" || len(agents) == 0 { continue } - for _, agent := range agents { - status, reason := ssync.ClassifySkillForTarget(agent.FlatName, nil, name, ac.Include, ac.Exclude) - entries = append(entries, syncMatrixEntry{ - Skill: agent.FlatName, - Target: name, - Status: status, - Reason: reason, - Kind: "agent", - }) + agentMode := ac.Mode + if agentMode == "" { + agentMode = "merge" + } + if agentMode == "symlink" { + for _, agent := range agents { + entries = append(entries, syncMatrixEntry{ + Skill: agent.FlatName, + Target: name, + Status: "na", + Reason: "symlink mode — filters not applicable", + Kind: "agent", + }) + } + } else { + for _, agent := range agents { + status, reason := ssync.ClassifySkillForTarget(agent.FlatName, nil, name, ac.Include, ac.Exclude) + entries = append(entries, syncMatrixEntry{ + Skill: agent.FlatName, + Target: name, + Status: status, + Reason: reason, + Kind: "agent", + }) + } } } @@ -159,23 +180,44 @@ func (s *Server) handleSyncMatrixPreview(w http.ResponseWriter, r *http.Request) ac := target.AgentsConfig() agentPath := ac.Path if agentPath == "" { - builtinAgents := config.DefaultAgentTargets() - if builtin, found := builtinAgents[body.Target]; found { + var previewBuiltin map[string]config.TargetConfig + if s.IsProjectMode() { + previewBuiltin = config.ProjectAgentTargets() + } else { + previewBuiltin = config.DefaultAgentTargets() + } + if builtin, found := previewBuiltin[body.Target]; found { agentPath = builtin.Path } } if agentPath != "" { discovered, _ := resource.AgentKind{}.Discover(agentsSource) agents := resource.ActiveAgents(discovered) - for _, agent := range agents { - status, reason := ssync.ClassifySkillForTarget(agent.FlatName, nil, body.Target, body.AgentInclude, body.AgentExclude) - entries = append(entries, syncMatrixEntry{ - Skill: agent.FlatName, - Target: body.Target, - Status: status, - Reason: reason, - Kind: "agent", - }) + agentMode := ac.Mode + if agentMode == "" { + agentMode = "merge" + } + if agentMode == "symlink" { + for _, agent := range agents { + entries = append(entries, syncMatrixEntry{ + Skill: agent.FlatName, + Target: body.Target, + Status: "na", + Reason: "symlink mode — filters not applicable", + Kind: "agent", + }) + } + } else { + for _, agent := range agents { + status, reason := ssync.ClassifySkillForTarget(agent.FlatName, nil, body.Target, body.AgentInclude, body.AgentExclude) + entries = append(entries, syncMatrixEntry{ + Skill: agent.FlatName, + Target: body.Target, + Status: status, + Reason: reason, + Kind: "agent", + }) + } } } } diff --git a/internal/server/handler_targets.go b/internal/server/handler_targets.go index 43469ff9..be8a35cc 100644 --- a/internal/server/handler_targets.go +++ b/internal/server/handler_targets.go @@ -155,8 +155,8 @@ func (s *Server) handleListTargets(w http.ResponseWriter, r *http.Request) { if agentSummary != nil { item.AgentPath = agentSummary.Path item.AgentMode = agentSummary.Mode - item.AgentInclude = append([]string(nil), agentSummary.Include...) - item.AgentExclude = append([]string(nil), agentSummary.Exclude...) + item.AgentInclude = agentSummary.Include + item.AgentExclude = agentSummary.Exclude item.AgentLinkedCount = intPtr(agentSummary.ManagedCount) item.AgentExpectedCount = intPtr(agentSummary.ExpectedCount) } diff --git a/internal/server/resource_agents.go b/internal/server/resource_agents.go index 8394cbdf..90603fc1 100644 --- a/internal/server/resource_agents.go +++ b/internal/server/resource_agents.go @@ -19,7 +19,7 @@ func matchesAgentName(d resource.DiscoveredResource, name string) bool { agentDisplayName(d.RelPath) == name } -func resolveAgentResource(agentsSource, name string) (resource.DiscoveredResource, error) { +func findAgent(agentsSource, name string) (resource.DiscoveredResource, error) { discovered, err := resource.AgentKind{}.Discover(agentsSource) if err != nil { return resource.DiscoveredResource{}, fmt.Errorf("failed to discover agents: %w", err) @@ -32,17 +32,16 @@ func resolveAgentResource(agentsSource, name string) (resource.DiscoveredResourc return resource.DiscoveredResource{}, fmt.Errorf("agent not found: %s", name) } +func resolveAgentResource(agentsSource, name string) (resource.DiscoveredResource, error) { + return findAgent(agentsSource, name) +} + func (s *Server) resolveAgentRelPathWithStatus(agentsSource, name string) (string, bool, error) { - discovered, err := resource.AgentKind{}.Discover(agentsSource) + d, err := findAgent(agentsSource, name) if err != nil { - return "", false, fmt.Errorf("failed to discover agents: %w", err) - } - for _, d := range discovered { - if matchesAgentName(d, name) { - return d.RelPath, d.Disabled, nil - } + return "", false, err } - return "", false, fmt.Errorf("agent not found: %s", name) + return d.RelPath, d.Disabled, nil } func agentMetaKey(relPath string) string { From 30da704406734d820ed7d2a94b10b83535c5f2eb Mon Sep 17 00:00:00 2001 From: Willie Date: Thu, 9 Apr 2026 04:11:31 +0800 Subject: [PATCH 127/205] fix(sync): pass -g flag to cmdSyncExtras in global --all mode Without the explicit -g flag, cmdSyncExtras falls back to auto-detection which picks project mode when cwd contains .skillshare/config.yaml, causing 'sync --all -g' to display project extras messages instead of global ones. Mirrors the project-mode path that already passes -p. --- cmd/skillshare/sync.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmd/skillshare/sync.go b/cmd/skillshare/sync.go index 41c1d262..559d0bf0 100644 --- a/cmd/skillshare/sync.go +++ b/cmd/skillshare/sync.go @@ -294,7 +294,7 @@ func cmdSync(args []string) error { } if hasAll { - if extrasErr := cmdSyncExtras(rest); extrasErr != nil { + if extrasErr := cmdSyncExtras(append([]string{"-g"}, rest...)); extrasErr != nil { ui.Warning("Extras sync: %v", extrasErr) } } From d2bf6a0fe39c67cdec0906cc2334365e419e8651 Mon Sep 17 00:00:00 2001 From: Willie Date: Thu, 9 Apr 2026 04:18:08 +0800 Subject: [PATCH 128/205] docs: add agent management to README for v0.19.0 Update tagline, highlights, architecture diagram, and platform table to reflect agents as a core resource type alongside skills and extras. --- README.md | 21 +++++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index 0c791333..528a0ab8 100644 --- a/README.md +++ b/README.md @@ -22,7 +22,7 @@

- One source of truth for AI CLI skills, rules, commands & more. Sync everywhere with one command — from personal to organization-wide.
+ One source of truth for AI CLI skills, agents, rules, commands & more. Sync everywhere with one command — from personal to organization-wide.
Codex, Claude Code, OpenClaw, OpenCode & 50+ more.

@@ -40,7 +40,7 @@

> [!NOTE] -> **Latest**: [v0.18.3](https://github.com/runkids/skillshare/releases/tag/v0.18.3) — enable/disable skills, skills sub-key config, upgrade auto-sudo. [All releases →](https://github.com/runkids/skillshare/releases) +> **Latest**: [v0.19.0](https://github.com/runkids/skillshare/releases/tag/v0.19.0) — agent management, filter studio, unified resources UI. [All releases →](https://github.com/runkids/skillshare/releases) ## Why skillshare @@ -50,6 +50,7 @@ You edit in one, forget to copy to another, and lose track of what's where. skillshare fixes this: - **One source, every agent** — sync to Claude, Cursor, Codex & 50+ more with `skillshare sync` +- **Agent management** — sync custom agents alongside skills to agent-capable targets - **More than skills** — manage rules, commands, prompts & any file-based resource with [extras](https://skillshare.runkids.cc/docs/reference/targets/configuration#extras) - **Install from anywhere** — GitHub, GitLab, Bitbucket, Azure DevOps, or any self-hosted Git - **Built-in security** — audit skills for prompt injection and data exfiltration before use @@ -68,6 +69,7 @@ skillshare fixes this: ┌─────────────────────────────────────────────────────────────┐ │ Source Directory │ │ ~/.config/skillshare/skills/ ← skills (SKILL.md) │ +│ ~/.config/skillshare/agents/ ← agents │ │ ~/.config/skillshare/extras/ ← rules, commands, etc. │ └─────────────────────────────────────────────────────────────┘ │ sync @@ -78,10 +80,10 @@ skillshare fixes this: └───────────┘ └───────────┘ └───────────┘ ``` -| Platform | Skills Source | Extras Source | Link Type | -|----------|---------------|---------------|-----------| -| macOS/Linux | `~/.config/skillshare/skills/` | `~/.config/skillshare/extras/` | Symlinks | -| Windows | `%AppData%\skillshare\skills\` | `%AppData%\skillshare\extras\` | NTFS Junctions (no admin required) | +| Platform | Skills Source | Agents Source | Extras Source | Link Type | +|----------|---------------|---------------|---------------|-----------| +| macOS/Linux | `~/.config/skillshare/skills/` | `~/.config/skillshare/agents/` | `~/.config/skillshare/extras/` | Symlinks | +| Windows | `%AppData%\skillshare\skills\` | `%AppData%\skillshare\agents\` | `%AppData%\skillshare\extras\` | NTFS Junctions (no admin required) | | | Imperative (install-per-command) | Declarative (skillshare) | |---|---|---| @@ -180,6 +182,13 @@ skillshare audit skillshare init -p && skillshare sync ``` +**Agents** —sync custom agents to agent-capable targets + +```bash +skillshare sync agents # sync agents only +skillshare sync --all # sync skills + agents + extras together +``` + **Extras** —manage rules, commands, prompts & more ```bash From 2475acd35eb2b0dd6068345eb60478af8ad5a169 Mon Sep 17 00:00:00 2001 From: Willie Date: Thu, 9 Apr 2026 13:48:30 +0800 Subject: [PATCH 129/205] feat(ui): add agent support to batch uninstall and trash pages MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Add agent-mode batch uninstall handler with kind-based routing in handler_uninstall.go (skill/agent dispatch) - Add Skills/Agents tab navigation to BatchUninstallPage and TrashPage with per-tab filtering and scoped empty states - Extract SourceBadge component to replace duplicated getTypeLabel / skillTypeLabel helpers across ResourcesPage, ResourceDetailPage, BatchUninstallPage, and AnalyzePage - Add kind field to BatchUninstallRequest API type - Capitalize badge labels for consistency (tracked → Tracked, etc.) --- internal/server/handler_uninstall.go | 96 +++++++++++++++++++++++++++- ui/src/api/client.ts | 1 + ui/src/components/SourceBadge.tsx | 27 ++++++++ ui/src/pages/AnalyzePage.tsx | 4 +- ui/src/pages/BatchUninstallPage.tsx | 83 ++++++++++++++++++++---- ui/src/pages/ResourceDetailPage.tsx | 13 +--- ui/src/pages/ResourcesPage.tsx | 32 +++------- ui/src/pages/TrashPage.tsx | 60 +++++++++++++++-- 8 files changed, 260 insertions(+), 56 deletions(-) create mode 100644 ui/src/components/SourceBadge.tsx diff --git a/internal/server/handler_uninstall.go b/internal/server/handler_uninstall.go index 2b00d5ba..ff645c1f 100644 --- a/internal/server/handler_uninstall.go +++ b/internal/server/handler_uninstall.go @@ -49,7 +49,101 @@ func (s *Server) handleBatchUninstall(w http.ResponseWriter, r *http.Request) { writeError(w, http.StatusBadRequest, "names array is required and must not be empty") return } + if body.Kind != "" && body.Kind != "skill" && body.Kind != "agent" { + writeError(w, http.StatusBadRequest, "invalid kind: "+body.Kind) + return + } + + // Agent-mode batch uninstall + if body.Kind == "agent" { + s.handleBatchUninstallAgents(w, body, start) + return + } + + // Skill-mode (default) batch uninstall + s.handleBatchUninstallSkills(w, body, start) +} + +func (s *Server) handleBatchUninstallAgents(w http.ResponseWriter, body batchUninstallRequest, start time.Time) { + agentsSource := s.agentsSource() + if agentsSource == "" { + writeError(w, http.StatusInternalServerError, "agents source not configured") + return + } + + results := make([]batchUninstallItemResult, 0, len(body.Names)) + var removedNames []string + succeeded, failed := 0, 0 + var firstErr string + + for _, name := range body.Names { + res := batchUninstallItemResult{Name: name, Kind: "agent"} + + agent, err := resolveAgentResource(agentsSource, name) + if err != nil { + res.Success = false + res.Error = "agent not found: " + name + results = append(results, res) + failed++ + if firstErr == "" { + firstErr = res.Error + } + continue + } + + displayName := agentMetaKey(agent.RelPath) + legacySidecar := filepath.Join(filepath.Dir(agent.SourcePath), filepath.Base(displayName)+".skillshare-meta.json") + if _, err := trash.MoveAgentToTrash(agent.SourcePath, legacySidecar, displayName, s.agentTrashBase()); err != nil { + res.Success = false + res.Error = fmt.Sprintf("failed to trash agent: %v", err) + results = append(results, res) + failed++ + if firstErr == "" { + firstErr = res.Error + } + continue + } + + removedNames = append(removedNames, displayName) + res.Success = true + res.MovedToTrash = true + results = append(results, res) + succeeded++ + } + + if succeeded > 0 && s.agentsStore != nil { + for _, name := range removedNames { + s.agentsStore.Remove(name) + } + if err := s.agentsStore.Save(agentsSource); err != nil { + log.Printf("warning: failed to save agent metadata after batch uninstall: %v", err) + } + } + + status := "ok" + if failed > 0 && succeeded > 0 { + status = "partial" + } else if failed > 0 { + status = "error" + } + + s.writeOpsLog("uninstall", status, start, map[string]any{ + "names": body.Names, + "kind": "agent", + "scope": "ui", + "count": succeeded, + }, firstErr) + + writeJSON(w, map[string]any{ + "results": results, + "summary": batchUninstallSummary{ + Succeeded: succeeded, + Failed: failed, + }, + }) +} +func (s *Server) handleBatchUninstallSkills(w http.ResponseWriter, body batchUninstallRequest, start time.Time) { discovered, err := sync.DiscoverSourceSkills(s.cfg.Source) if err != nil { writeError(w, http.StatusInternalServerError, "failed to discover skills: "+err.Error()) @@ -70,7 +164,7 @@ func (s *Server) handleBatchUninstall(w http.ResponseWriter, r *http.Request) { var firstErr string for _, name := range body.Names { - res := batchUninstallItemResult{Name: name} + res := batchUninstallItemResult{Name: name, Kind: "skill"} if strings.HasPrefix(name, "_") { repoPath := filepath.Join(s.cfg.Source, name) diff --git a/ui/src/api/client.ts b/ui/src/api/client.ts index 86543088..705437d2 100644 --- a/ui/src/api/client.ts +++ b/ui/src/api/client.ts @@ -730,6 +730,7 @@ export interface BatchInstallResult { export interface BatchUninstallRequest { names: string[]; + kind?: 'skill' | 'agent'; force?: boolean; } diff --git a/ui/src/components/SourceBadge.tsx b/ui/src/components/SourceBadge.tsx new file mode 100644 index 00000000..a341fc96 --- /dev/null +++ b/ui/src/components/SourceBadge.tsx @@ -0,0 +1,27 @@ +import Badge from './Badge'; + +type SourceType = 'tracked' | 'github' | 'local'; + +interface SourceBadgeProps { + type?: string; + isInRepo?: boolean; + size?: 'sm' | 'md'; +} + +function resolveSource(type?: string, isInRepo?: boolean): SourceType { + if (isInRepo) return 'tracked'; + if (type === 'github' || type === 'github-subdir') return 'github'; + return 'local'; +} + +const config: Record = { + tracked: { label: 'Tracked', variant: 'default' }, + github: { label: 'GitHub', variant: 'info' }, + local: { label: 'Local', variant: 'default' }, +}; + +export default function SourceBadge({ type, isInRepo, size = 'sm' }: SourceBadgeProps) { + const source = resolveSource(type, isInRepo); + const { label, variant } = config[source]; + return {label}; +} diff --git a/ui/src/pages/AnalyzePage.tsx b/ui/src/pages/AnalyzePage.tsx index d388f6c5..d545d1f8 100644 --- a/ui/src/pages/AnalyzePage.tsx +++ b/ui/src/pages/AnalyzePage.tsx @@ -715,7 +715,7 @@ function SkillTable({
{skill.path} - {skill.is_tracked && tracked} + {skill.is_tracked && Tracked}
{formatTokens(skill.description_tokens)} @@ -786,7 +786,7 @@ function SkillDetailDialog({

{skill.path}

- {skill.is_tracked && tracked} + {skill.is_tracked && Tracked}
+ ))} + + {/* ── Sticky Toolbar (matches SkillsPage pattern) ── */}
{/* Search row — full width */} @@ -440,9 +497,7 @@ export default function BatchUninstallPage() { {repo && ( {repo.replace(/^_/, '')} )} - - {skill.isInRepo ? 'tracked' : (getTypeLabel(skill.type) ?? 'local')} - +
); diff --git a/ui/src/pages/ResourceDetailPage.tsx b/ui/src/pages/ResourceDetailPage.tsx index 483abaa8..83bc0611 100644 --- a/ui/src/pages/ResourceDetailPage.tsx +++ b/ui/src/pages/ResourceDetailPage.tsx @@ -11,6 +11,7 @@ import { useQuery, useQueryClient } from '@tanstack/react-query'; import { queryKeys, staleTimes } from '../lib/queryKeys'; import Badge from '../components/Badge'; import KindBadge from '../components/KindBadge'; +import SourceBadge from '../components/SourceBadge'; import Card from '../components/Card'; import CopyButton from '../components/CopyButton'; import Button from '../components/Button'; @@ -96,13 +97,6 @@ function parseSkillMarkdown(content: string): { manifest: SkillManifest; markdow return { manifest, markdown }; } -function skillTypeLabel(type?: string): string { - if (!type) return 'Local'; - if (type === 'github-subdir') return 'GitHub'; - if (type === 'github') return 'GitHub'; - return type.charAt(0).toUpperCase() + type.slice(1); -} - /** Returns a lucide icon component + color class for a filename */ function getFileIcon(filename: string): { icon: typeof File; className: string } { if (filename === 'SKILL.md') return { icon: FileText, className: 'text-blue' }; @@ -384,9 +378,8 @@ export default function SkillDetailPage() { {resource.name} - {resource.disabled && disabled} - {resource.isInRepo && tracked repo} - {skillTypeLabel(resource.type) && {skillTypeLabel(resource.type)}} + {resource.disabled && Disabled} + {resource.targets && resource.targets.length > 0 && ( diff --git a/ui/src/pages/ResourcesPage.tsx b/ui/src/pages/ResourcesPage.tsx index cc1c897b..4ed5939a 100644 --- a/ui/src/pages/ResourcesPage.tsx +++ b/ui/src/pages/ResourcesPage.tsx @@ -34,6 +34,7 @@ import type { GridComponents } from 'react-virtuoso'; import { queryKeys, staleTimes } from '../lib/queryKeys'; import Badge from '../components/Badge'; import KindBadge from '../components/KindBadge'; +import SourceBadge from '../components/SourceBadge'; import { Input, Select, type SelectOption } from '../components/Input'; import { PageSkeleton } from '../components/Skeleton'; import EmptyState from '../components/EmptyState'; @@ -531,11 +532,6 @@ function matchFilter(skill: Skill, filterType: FilterType): boolean { } } -function getTypeLabel(type?: string): string | undefined { - if (!type) return undefined; - if (type === 'github-subdir') return 'github'; - return type; -} function sortSkills(skills: Skill[], sortType: SortType): Skill[] { const sorted = [...skills]; @@ -673,9 +669,8 @@ const SkillPostit = memo(function SkillPostit({ )}
- {skill.disabled && disabled} - {skill.isInRepo && tracked} - {!skill.isInRepo && getTypeLabel(skill.type) && {getTypeLabel(skill.type)}} + {skill.disabled && Disabled} + {skill.branch && ( @@ -1391,13 +1386,8 @@ function FolderTreeView({ skills, resourceKind, totalCount, isSearching, stickyT } {skill.name} - {skill.disabled && disabled} - {skill.isInRepo - ? tracked - : getTypeLabel(skill.type) - ? {getTypeLabel(skill.type)} - : local - } + {skill.disabled && Disabled} + {skill.branch && ( @@ -1682,7 +1672,7 @@ function SkillsTable({ skills, resourceKind }: { skills: Skill[]; resourceKind: ? 'var(--color-pencil-light)' : 'var(--color-muted)', }} - title={skill.isInRepo ? 'tracked' : 'local'} + title={skill.isInRepo ? 'Tracked' : 'Local'} /> {/* Name + path subtitle + source */} @@ -1718,14 +1708,8 @@ function SkillsTable({ skills, resourceKind }: { skills: Skill[]; resourceKind: {/* Type badges */}
- {skill.disabled && disabled} - {skill.isInRepo ? ( - tracked - ) : getTypeLabel(skill.type) ? ( - {getTypeLabel(skill.type)} - ) : ( - local - )} + {skill.disabled && Disabled} + {skill.branch && ( diff --git a/ui/src/pages/TrashPage.tsx b/ui/src/pages/TrashPage.tsx index f98e6282..36729bd0 100644 --- a/ui/src/pages/TrashPage.tsx +++ b/ui/src/pages/TrashPage.tsx @@ -1,10 +1,12 @@ -import { useState } from 'react'; +import { useState, useMemo } from 'react'; import { Trash2, Clock, RotateCcw, X, RefreshCw, + Puzzle, + Bot, } from 'lucide-react'; import { useQuery, useQueryClient } from '@tanstack/react-query'; import { api } from '../api/client'; @@ -54,7 +56,19 @@ export default function TrashPage() { const [emptyOpen, setEmptyOpen] = useState(false); const [emptying, setEmptying] = useState(false); - const items = data?.items ?? []; + const allItems = data?.items ?? []; + + // Tab state + type ResourceTab = 'skills' | 'agents'; + const [activeTab, setActiveTab] = useState('skills'); + const skillCount = useMemo(() => allItems.filter((i) => (i.kind ?? 'skill') !== 'agent').length, [allItems]); + const agentCount = useMemo(() => allItems.filter((i) => i.kind === 'agent').length, [allItems]); + const items = useMemo( + () => activeTab === 'agents' + ? allItems.filter((i) => i.kind === 'agent') + : allItems.filter((i) => (i.kind ?? 'skill') !== 'agent'), + [allItems, activeTab], + ); const handleRefresh = () => { queryClient.invalidateQueries({ queryKey: queryKeys.trash }); @@ -126,12 +140,13 @@ export default function TrashPage() { subtitle={isProjectMode ? 'Recently deleted project skills and agents are kept for 7 days before automatic cleanup' : 'Recently deleted skills and agents are kept for 7 days before automatic cleanup'} + className="mb-4!" actions={ <> - {items.length > 0 && ( + {allItems.length > 0 && ( @@ -140,6 +155,39 @@ export default function TrashPage() { } /> + {/* Resource type tabs (Skills / Agents) */} + + {/* Summary line */} {items.length > 0 && (

@@ -152,8 +200,10 @@ export default function TrashPage() { {items.length === 0 ? ( ) : (

From 6d1f675a1704d2745ae817e6c94d84b9224ccfb8 Mon Sep 17 00:00:00 2001 From: Willie Date: Thu, 9 Apr 2026 14:14:05 +0800 Subject: [PATCH 130/205] fix(sync): strip .md extension in agent filter matching and add tests FilterAgents matched against FlatName which includes .md (e.g. "tutor.md"), so user patterns like "tutor" or "team-*" silently matched nothing. Strip the .md suffix before pattern comparison so agent filters behave consistently with skill filters. Add FilterAgents unit tests (include, exclude, combined, nested, glob, invalid pattern) and integration tests for both filter paths: - include/exclude config patterns (4 tests) - .agentignore disabled state (3 tests: explicit, glob, nested) --- internal/sync/filter.go | 6 +- internal/sync/filter_test.go | 79 +++++++ tests/integration/sync_agent_test.go | 312 +++++++++++++++++++++++++++ 3 files changed, 396 insertions(+), 1 deletion(-) diff --git a/internal/sync/filter.go b/internal/sync/filter.go index 65cbdf87..d437696e 100644 --- a/internal/sync/filter.go +++ b/internal/sync/filter.go @@ -28,6 +28,9 @@ func FilterSkills(skills []DiscoveredSkill, include, exclude []string) ([]Discov } // FilterAgents filters discovered agents by include/exclude patterns. +// Agent FlatNames include the .md extension (e.g. "tutor.md"), but filter +// patterns are matched against the name without extension so users can write +// intuitive patterns like "tutor" or "team-*" instead of "tutor.md". func FilterAgents(agents []resource.DiscoveredResource, include, exclude []string) ([]resource.DiscoveredResource, error) { includePatterns, excludePatterns, err := normalizedFilterPatterns(include, exclude) if err != nil { @@ -36,7 +39,8 @@ func FilterAgents(agents []resource.DiscoveredResource, include, exclude []strin filtered := make([]resource.DiscoveredResource, 0, len(agents)) for _, agent := range agents { - if shouldSyncFlatName(agent.FlatName, includePatterns, excludePatterns) { + name := strings.TrimSuffix(agent.FlatName, ".md") + if shouldSyncFlatName(name, includePatterns, excludePatterns) { filtered = append(filtered, agent) } } diff --git a/internal/sync/filter_test.go b/internal/sync/filter_test.go index 2c0eb43e..0410e27e 100644 --- a/internal/sync/filter_test.go +++ b/internal/sync/filter_test.go @@ -3,6 +3,8 @@ package sync import ( "reflect" "testing" + + "skillshare/internal/resource" ) func TestFilterSkills_IncludeOnly(t *testing.T) { @@ -84,6 +86,83 @@ func TestShouldSyncFlatName(t *testing.T) { } } +// --- FilterAgents tests --- + +func TestFilterAgents_IncludeOnly(t *testing.T) { + // FlatNames include .md; patterns should match without extension + agents := testAgents("code-reviewer.md", "tutor.md", "debugger.md") + filtered, err := FilterAgents(agents, []string{"code-*", "tutor"}, nil) + if err != nil { + t.Fatalf("FilterAgents returned error: %v", err) + } + assertAgentFlatNames(t, filtered, []string{"code-reviewer.md", "tutor.md"}) +} + +func TestFilterAgents_ExcludeOnly(t *testing.T) { + agents := testAgents("code-reviewer.md", "tutor.md", "debugger.md") + filtered, err := FilterAgents(agents, nil, []string{"tutor", "debug*"}) + if err != nil { + t.Fatalf("FilterAgents returned error: %v", err) + } + assertAgentFlatNames(t, filtered, []string{"code-reviewer.md"}) +} + +func TestFilterAgents_IncludeThenExclude(t *testing.T) { + agents := testAgents("team-reviewer.md", "team-debugger.md", "personal-tutor.md") + filtered, err := FilterAgents(agents, []string{"team-*"}, []string{"*-debugger"}) + if err != nil { + t.Fatalf("FilterAgents returned error: %v", err) + } + assertAgentFlatNames(t, filtered, []string{"team-reviewer.md"}) +} + +func TestFilterAgents_EmptyPatternsReturnAll(t *testing.T) { + agents := testAgents("a.md", "b.md", "c.md") + filtered, err := FilterAgents(agents, nil, nil) + if err != nil { + t.Fatalf("FilterAgents returned error: %v", err) + } + assertAgentFlatNames(t, filtered, []string{"a.md", "b.md", "c.md"}) +} + +func TestFilterAgents_NestedFlatNames(t *testing.T) { + agents := testAgents("team__reviewer.md", "team__debugger.md", "personal__tutor.md") + filtered, err := FilterAgents(agents, []string{"team__*"}, nil) + if err != nil { + t.Fatalf("FilterAgents returned error: %v", err) + } + assertAgentFlatNames(t, filtered, []string{"team__reviewer.md", "team__debugger.md"}) +} + +func TestFilterAgents_InvalidPattern(t *testing.T) { + agents := testAgents("one.md") + if _, err := FilterAgents(agents, []string{"["}, nil); err == nil { + t.Fatal("expected invalid include pattern error") + } + if _, err := FilterAgents(agents, nil, []string{"["}); err == nil { + t.Fatal("expected invalid exclude pattern error") + } +} + +func testAgents(names ...string) []resource.DiscoveredResource { + agents := make([]resource.DiscoveredResource, 0, len(names)) + for _, name := range names { + agents = append(agents, resource.DiscoveredResource{FlatName: name, Kind: "agent"}) + } + return agents +} + +func assertAgentFlatNames(t *testing.T, agents []resource.DiscoveredResource, want []string) { + t.Helper() + got := make([]string, 0, len(agents)) + for _, a := range agents { + got = append(got, a.FlatName) + } + if !reflect.DeepEqual(got, want) { + t.Fatalf("agent flat names = %v, want %v", got, want) + } +} + func testSkills(names ...string) []DiscoveredSkill { skills := make([]DiscoveredSkill, 0, len(names)) for _, name := range names { diff --git a/tests/integration/sync_agent_test.go b/tests/integration/sync_agent_test.go index 72c9b79e..1ff3adeb 100644 --- a/tests/integration/sync_agent_test.go +++ b/tests/integration/sync_agent_test.go @@ -10,6 +10,318 @@ import ( "skillshare/internal/testutil" ) +func TestSync_Agents_IncludeFilter(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + agentsDir := filepath.Join(filepath.Dir(sb.SourcePath), "agents") + os.MkdirAll(agentsDir, 0755) + os.WriteFile(filepath.Join(agentsDir, "tutor.md"), []byte("# Tutor"), 0644) + os.WriteFile(filepath.Join(agentsDir, "reviewer.md"), []byte("# Reviewer"), 0644) + os.WriteFile(filepath.Join(agentsDir, "debugger.md"), []byte("# Debugger"), 0644) + + claudeSkills := filepath.Join(sb.Home, ".claude", "skills") + claudeAgents := filepath.Join(sb.Home, ".claude", "agents") + os.MkdirAll(claudeSkills, 0755) + os.MkdirAll(claudeAgents, 0755) + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + skills: + path: "` + claudeSkills + `" + agents: + path: "` + claudeAgents + `" + include: + - "tutor" + - "reviewer" +`) + + result := sb.RunCLI("sync", "agents") + result.AssertSuccess(t) + + // Included agents should be synced + if _, err := os.Lstat(filepath.Join(claudeAgents, "tutor.md")); err != nil { + t.Error("tutor.md should be synced (included)") + } + if _, err := os.Lstat(filepath.Join(claudeAgents, "reviewer.md")); err != nil { + t.Error("reviewer.md should be synced (included)") + } + + // Excluded agent should NOT be synced + if _, err := os.Lstat(filepath.Join(claudeAgents, "debugger.md")); !os.IsNotExist(err) { + t.Error("debugger.md should NOT be synced (not in include list)") + } +} + +func TestSync_Agents_ExcludeFilter(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + agentsDir := filepath.Join(filepath.Dir(sb.SourcePath), "agents") + os.MkdirAll(agentsDir, 0755) + os.WriteFile(filepath.Join(agentsDir, "tutor.md"), []byte("# Tutor"), 0644) + os.WriteFile(filepath.Join(agentsDir, "reviewer.md"), []byte("# Reviewer"), 0644) + os.WriteFile(filepath.Join(agentsDir, "debugger.md"), []byte("# Debugger"), 0644) + + claudeSkills := filepath.Join(sb.Home, ".claude", "skills") + claudeAgents := filepath.Join(sb.Home, ".claude", "agents") + os.MkdirAll(claudeSkills, 0755) + os.MkdirAll(claudeAgents, 0755) + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + skills: + path: "` + claudeSkills + `" + agents: + path: "` + claudeAgents + `" + exclude: + - "debugger" +`) + + result := sb.RunCLI("sync", "agents") + result.AssertSuccess(t) + + // Non-excluded agents should be synced + if _, err := os.Lstat(filepath.Join(claudeAgents, "tutor.md")); err != nil { + t.Error("tutor.md should be synced (not excluded)") + } + if _, err := os.Lstat(filepath.Join(claudeAgents, "reviewer.md")); err != nil { + t.Error("reviewer.md should be synced (not excluded)") + } + + // Excluded agent should NOT be synced + if _, err := os.Lstat(filepath.Join(claudeAgents, "debugger.md")); !os.IsNotExist(err) { + t.Error("debugger.md should NOT be synced (excluded)") + } +} + +func TestSync_Agents_IncludeExcludeCombined(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + agentsDir := filepath.Join(filepath.Dir(sb.SourcePath), "agents") + os.MkdirAll(agentsDir, 0755) + os.WriteFile(filepath.Join(agentsDir, "team-reviewer.md"), []byte("# Team Reviewer"), 0644) + os.WriteFile(filepath.Join(agentsDir, "team-debugger.md"), []byte("# Team Debugger"), 0644) + os.WriteFile(filepath.Join(agentsDir, "personal-tutor.md"), []byte("# Personal Tutor"), 0644) + + claudeSkills := filepath.Join(sb.Home, ".claude", "skills") + claudeAgents := filepath.Join(sb.Home, ".claude", "agents") + os.MkdirAll(claudeSkills, 0755) + os.MkdirAll(claudeAgents, 0755) + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + skills: + path: "` + claudeSkills + `" + agents: + path: "` + claudeAgents + `" + include: + - "team-*" + exclude: + - "*-debugger" +`) + + result := sb.RunCLI("sync", "agents") + result.AssertSuccess(t) + + // team-reviewer matches include and not exclude → synced + if _, err := os.Lstat(filepath.Join(claudeAgents, "team-reviewer.md")); err != nil { + t.Error("team-reviewer.md should be synced (included, not excluded)") + } + + // team-debugger matches include but also matches exclude → NOT synced + if _, err := os.Lstat(filepath.Join(claudeAgents, "team-debugger.md")); !os.IsNotExist(err) { + t.Error("team-debugger.md should NOT be synced (excluded by *-debugger)") + } + + // personal-tutor does not match include → NOT synced + if _, err := os.Lstat(filepath.Join(claudeAgents, "personal-tutor.md")); !os.IsNotExist(err) { + t.Error("personal-tutor.md should NOT be synced (not in include list)") + } +} + +func TestSync_Agents_GlobExcludePattern(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + agentsDir := filepath.Join(filepath.Dir(sb.SourcePath), "agents") + os.MkdirAll(agentsDir, 0755) + os.WriteFile(filepath.Join(agentsDir, "alpha.md"), []byte("# Alpha"), 0644) + os.WriteFile(filepath.Join(agentsDir, "beta.md"), []byte("# Beta"), 0644) + os.WriteFile(filepath.Join(agentsDir, "gamma.md"), []byte("# Gamma"), 0644) + + claudeSkills := filepath.Join(sb.Home, ".claude", "skills") + claudeAgents := filepath.Join(sb.Home, ".claude", "agents") + os.MkdirAll(claudeSkills, 0755) + os.MkdirAll(claudeAgents, 0755) + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + skills: + path: "` + claudeSkills + `" + agents: + path: "` + claudeAgents + `" + exclude: + - "?eta" + - "gamma" +`) + + result := sb.RunCLI("sync", "agents") + result.AssertSuccess(t) + + // alpha doesn't match any exclude → synced + if _, err := os.Lstat(filepath.Join(claudeAgents, "alpha.md")); err != nil { + t.Error("alpha.md should be synced") + } + + // beta matches ?eta → NOT synced + if _, err := os.Lstat(filepath.Join(claudeAgents, "beta.md")); !os.IsNotExist(err) { + t.Error("beta.md should NOT be synced (excluded by ?eta)") + } + + // gamma matches gamma → NOT synced + if _, err := os.Lstat(filepath.Join(claudeAgents, "gamma.md")); !os.IsNotExist(err) { + t.Error("gamma.md should NOT be synced (excluded by gamma)") + } +} + +func TestSync_Agents_DisabledAgentsNotSynced(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + agentsDir := filepath.Join(filepath.Dir(sb.SourcePath), "agents") + os.MkdirAll(agentsDir, 0755) + os.WriteFile(filepath.Join(agentsDir, "active.md"), []byte("# Active"), 0644) + os.WriteFile(filepath.Join(agentsDir, "disabled-one.md"), []byte("# Disabled One"), 0644) + os.WriteFile(filepath.Join(agentsDir, "disabled-two.md"), []byte("# Disabled Two"), 0644) + + // Disable two agents via .agentignore + os.WriteFile(filepath.Join(agentsDir, ".agentignore"), []byte("disabled-one.md\ndisabled-two.md\n"), 0644) + + claudeSkills := filepath.Join(sb.Home, ".claude", "skills") + claudeAgents := filepath.Join(sb.Home, ".claude", "agents") + os.MkdirAll(claudeSkills, 0755) + os.MkdirAll(claudeAgents, 0755) + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + skills: + path: "` + claudeSkills + `" + agents: + path: "` + claudeAgents + `" +`) + + result := sb.RunCLI("sync", "agents") + result.AssertSuccess(t) + + // Active agent should be synced + if _, err := os.Lstat(filepath.Join(claudeAgents, "active.md")); err != nil { + t.Error("active.md should be synced") + } + + // Disabled agents should NOT be synced + if _, err := os.Lstat(filepath.Join(claudeAgents, "disabled-one.md")); !os.IsNotExist(err) { + t.Error("disabled-one.md should NOT be synced (disabled via .agentignore)") + } + if _, err := os.Lstat(filepath.Join(claudeAgents, "disabled-two.md")); !os.IsNotExist(err) { + t.Error("disabled-two.md should NOT be synced (disabled via .agentignore)") + } +} + +func TestSync_Agents_DisabledByGlobPattern(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + agentsDir := filepath.Join(filepath.Dir(sb.SourcePath), "agents") + os.MkdirAll(agentsDir, 0755) + os.WriteFile(filepath.Join(agentsDir, "prod-reviewer.md"), []byte("# Prod"), 0644) + os.WriteFile(filepath.Join(agentsDir, "draft-experiment.md"), []byte("# Draft 1"), 0644) + os.WriteFile(filepath.Join(agentsDir, "draft-wip.md"), []byte("# Draft 2"), 0644) + + // Glob pattern disables all draft-* agents + os.WriteFile(filepath.Join(agentsDir, ".agentignore"), []byte("draft-*\n"), 0644) + + claudeSkills := filepath.Join(sb.Home, ".claude", "skills") + claudeAgents := filepath.Join(sb.Home, ".claude", "agents") + os.MkdirAll(claudeSkills, 0755) + os.MkdirAll(claudeAgents, 0755) + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + skills: + path: "` + claudeSkills + `" + agents: + path: "` + claudeAgents + `" +`) + + result := sb.RunCLI("sync", "agents") + result.AssertSuccess(t) + + // Non-draft agent should be synced + if _, err := os.Lstat(filepath.Join(claudeAgents, "prod-reviewer.md")); err != nil { + t.Error("prod-reviewer.md should be synced") + } + + // Draft agents should NOT be synced + if _, err := os.Lstat(filepath.Join(claudeAgents, "draft-experiment.md")); !os.IsNotExist(err) { + t.Error("draft-experiment.md should NOT be synced (disabled by draft-* pattern)") + } + if _, err := os.Lstat(filepath.Join(claudeAgents, "draft-wip.md")); !os.IsNotExist(err) { + t.Error("draft-wip.md should NOT be synced (disabled by draft-* pattern)") + } +} + +func TestSync_Agents_DisabledNestedAgent(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + agentsDir := filepath.Join(filepath.Dir(sb.SourcePath), "agents") + os.MkdirAll(filepath.Join(agentsDir, "team"), 0755) + os.WriteFile(filepath.Join(agentsDir, "top-level.md"), []byte("# Top"), 0644) + os.WriteFile(filepath.Join(agentsDir, "team", "reviewer.md"), []byte("# Reviewer"), 0644) + os.WriteFile(filepath.Join(agentsDir, "team", "debugger.md"), []byte("# Debugger"), 0644) + + // Disable one nested agent + os.WriteFile(filepath.Join(agentsDir, ".agentignore"), []byte("team/debugger.md\n"), 0644) + + claudeSkills := filepath.Join(sb.Home, ".claude", "skills") + claudeAgents := filepath.Join(sb.Home, ".claude", "agents") + os.MkdirAll(claudeSkills, 0755) + os.MkdirAll(claudeAgents, 0755) + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + skills: + path: "` + claudeSkills + `" + agents: + path: "` + claudeAgents + `" +`) + + result := sb.RunCLI("sync", "agents") + result.AssertSuccess(t) + + // Top-level and enabled nested agent should be synced + if _, err := os.Lstat(filepath.Join(claudeAgents, "top-level.md")); err != nil { + t.Error("top-level.md should be synced") + } + if _, err := os.Lstat(filepath.Join(claudeAgents, "team__reviewer.md")); err != nil { + t.Error("team__reviewer.md should be synced") + } + + // Disabled nested agent should NOT be synced + if _, err := os.Lstat(filepath.Join(claudeAgents, "team__debugger.md")); !os.IsNotExist(err) { + t.Error("team__debugger.md should NOT be synced (disabled via .agentignore)") + } +} + func TestSync_Agents_SkipsTargetsWithoutAgentsPath(t *testing.T) { sb := testutil.NewSandbox(t) defer sb.Cleanup() From baf6b181ae4c911fe357d02805f7dc4f53112be4 Mon Sep 17 00:00:00 2001 From: Willie Date: Thu, 9 Apr 2026 14:18:28 +0800 Subject: [PATCH 131/205] feat(audit): add Kind field to Result, Noun helpers to kind_filter, remove --all flag - audit.Result gains a Kind string field ("skill"|"agent", set by caller) so callers can tag results for display without touching the scan engine - resourceKindFilter gets Noun(count) and SingularNoun() helpers to produce correct singular/plural display nouns per resource type - audit command switches from parseKindArgWithAll to parseKindArg, dropping the --all flag and its two help-text lines; agents subcommand is the way to target agents explicitly --- cmd/skillshare/audit.go | 2 +- cmd/skillshare/audit_render.go | 4 +--- cmd/skillshare/kind_filter.go | 22 ++++++++++++++++++++++ internal/audit/audit.go | 1 + 4 files changed, 25 insertions(+), 4 deletions(-) diff --git a/cmd/skillshare/audit.go b/cmd/skillshare/audit.go index da4487f3..a4d31a86 100644 --- a/cmd/skillshare/audit.go +++ b/cmd/skillshare/audit.go @@ -131,7 +131,7 @@ func cmdAudit(args []string) error { applyModeLabel(mode) // Extract kind filter (e.g. "skillshare audit agents" or "--all"). - kind, rest := parseKindArgWithAll(rest) + kind, rest := parseKindArg(rest) // Check for "rules" subcommand before standard audit arg parsing. if len(rest) > 0 && rest[0] == "rules" { diff --git a/cmd/skillshare/audit_render.go b/cmd/skillshare/audit_render.go index a3315e9f..25eca17d 100644 --- a/cmd/skillshare/audit_render.go +++ b/cmd/skillshare/audit_render.go @@ -398,7 +398,6 @@ Arguments: path Existing file/directory path to scan (optional) Options: - --all Scan both skills and agents --group, -G Scan all skills in a group (repeatable) -p, --project Use project-level skills -g, --global Use global skills @@ -434,6 +433,5 @@ Examples: skillshare audit --format markdown # Output Markdown report (for GitHub Issues/PRs) skillshare audit --json # Same as --format json (deprecated) skillshare audit -p --init-rules # Create project custom rules file - skillshare audit agents # Scan agents only - skillshare audit --all # Scan skills + agents`) + skillshare audit agents # Scan agents only`) } diff --git a/cmd/skillshare/kind_filter.go b/cmd/skillshare/kind_filter.go index 4087e40c..e7a8c771 100644 --- a/cmd/skillshare/kind_filter.go +++ b/cmd/skillshare/kind_filter.go @@ -97,6 +97,28 @@ func (k resourceKindFilter) String() string { } } +// Noun returns the pluralized resource noun for display. +// Noun(1) → "skill"/"agent", Noun(2+) → "skills"/"agents". +func (k resourceKindFilter) Noun(count int) string { + switch k { + case kindAgents: + if count == 1 { + return "agent" + } + return "agents" + default: + if count == 1 { + return "skill" + } + return "skills" + } +} + +// SingularNoun returns the singular resource noun (no count needed). +func (k resourceKindFilter) SingularNoun() string { + return k.Noun(1) +} + func (k resourceKindFilter) IncludesSkills() bool { return k == kindAll || k == kindSkills } diff --git a/internal/audit/audit.go b/internal/audit/audit.go index ba1bc638..832a4156 100644 --- a/internal/audit/audit.go +++ b/internal/audit/audit.go @@ -130,6 +130,7 @@ type Finding struct { // Result holds all findings for a single skill. type Result struct { SkillName string `json:"skillName"` + Kind string `json:"kind,omitempty"` // "skill" or "agent" — set by caller Findings []Finding `json:"findings"` RiskScore int `json:"riskScore"` RiskLabel string `json:"riskLabel"` // "clean", "low", "medium", "high", "critical" From c6f24ab34a274e1852e81a96d7ec768f8ca512a8 Mon Sep 17 00:00:00 2001 From: Willie Date: Thu, 9 Apr 2026 14:23:20 +0800 Subject: [PATCH 132/205] feat(audit): dynamize skill/agent terminology in CLI output Thread kind parameter through auditInstalled, auditFiltered, and auditSkillByName. All output text now uses Noun() for correct skill(s)/agent(s) display. Set Result.Kind on all scan results. Also update audit_project.go callers to pass kindSkills. --- cmd/skillshare/audit.go | 48 ++++++++++++++++----------------- cmd/skillshare/audit_project.go | 4 +-- cmd/skillshare/audit_render.go | 12 ++++++--- 3 files changed, 34 insertions(+), 30 deletions(-) diff --git a/cmd/skillshare/audit.go b/cmd/skillshare/audit.go index a4d31a86..a5b8d9aa 100644 --- a/cmd/skillshare/audit.go +++ b/cmd/skillshare/audit.go @@ -244,13 +244,13 @@ func cmdAudit(args []string) error { switch { case !hasTargets: - results, summary, err = auditInstalled(sourcePath, modeString(mode), projectRoot, threshold, opts, registry) + results, summary, err = auditInstalled(sourcePath, modeString(mode), projectRoot, threshold, kind, opts, registry) case isSinglePath: results, summary, err = auditPath(opts.Targets[0], modeString(mode), projectRoot, threshold, opts.Format, opts.PolicyLine, registry) case isSingleName: - results, summary, err = auditSkillByName(sourcePath, opts.Targets[0], modeString(mode), projectRoot, threshold, opts.Format, opts.PolicyLine, registry) + results, summary, err = auditSkillByName(sourcePath, opts.Targets[0], modeString(mode), projectRoot, threshold, opts.Format, opts.PolicyLine, kind, registry) default: - results, summary, err = auditFiltered(sourcePath, opts.Targets, opts.Groups, modeString(mode), projectRoot, threshold, opts, registry) + results, summary, err = auditFiltered(sourcePath, opts.Targets, opts.Groups, modeString(mode), projectRoot, threshold, kind, opts, registry) } if err != nil { logAuditOp(cfgPath, rest, summary, start, err, false) @@ -501,7 +501,7 @@ func scanPathTarget(targetPath, projectRoot string, registry *audit.Registry) (* return audit.ScanFile(targetPath) } -func auditInstalled(sourcePath, mode, projectRoot, threshold string, opts auditOptions, reg *audit.Registry) ([]*audit.Result, auditRunSummary, error) { +func auditInstalled(sourcePath, mode, projectRoot, threshold string, kind resourceKindFilter, opts auditOptions, reg *audit.Registry) ([]*audit.Result, auditRunSummary, error) { jsonOutput := opts.isStructured() base := auditRunSummary{ Scope: "all", @@ -512,7 +512,7 @@ func auditInstalled(sourcePath, mode, projectRoot, threshold string, opts auditO // Phase 0: discover skills. var spinner *ui.Spinner if !jsonOutput { - spinner = ui.StartSpinner("Discovering skills...") + spinner = ui.StartSpinner(fmt.Sprintf("Discovering %s...", kind.Noun(2))) } skillPaths, err := collectInstalledSkillPaths(sourcePath) if err != nil { @@ -523,18 +523,18 @@ func auditInstalled(sourcePath, mode, projectRoot, threshold string, opts auditO } if len(skillPaths) == 0 { if spinner != nil { - spinner.Success("No skills found") + spinner.Success(fmt.Sprintf("No %s found", kind.Noun(2))) } return []*audit.Result{}, base, nil } if spinner != nil { - spinner.Success(fmt.Sprintf("Found %d skill(s)", len(skillPaths))) + spinner.Success(fmt.Sprintf("Found %d %s", len(skillPaths), kind.Noun(len(skillPaths)))) } // Phase 0.5: large audit confirmation prompt. if len(skillPaths) > largeAuditThreshold && !jsonOutput && !opts.Yes && ui.IsTTY() { - ui.Warning("Found %d skills. This may take a while.", len(skillPaths)) - ui.Info("Tip: use 'audit --group ' or 'audit ' to scan specific skills") + ui.Warning("Found %d %s. This may take a while.", len(skillPaths), kind.Noun(len(skillPaths))) + ui.Info("Tip: use 'audit --group ' or 'audit ' to scan specific %s", kind.Noun(2)) fmt.Print(" Continue? [y/N]: ") var answer string fmt.Scanln(&answer) @@ -547,7 +547,7 @@ func auditInstalled(sourcePath, mode, projectRoot, threshold string, opts auditO var headerMinWidth int if !jsonOutput { fmt.Println() - subtitle := auditHeaderSubtitle(fmt.Sprintf("Scanning %d skills for threats", len(skillPaths)), mode, sourcePath, threshold, opts.PolicyLine) + subtitle := auditHeaderSubtitle(fmt.Sprintf("Scanning %d %s for threats", len(skillPaths), kind.Noun(len(skillPaths))), mode, sourcePath, threshold, opts.PolicyLine) headerMinWidth = auditHeaderMinWidth(subtitle) ui.HeaderBoxWithMinWidth(auditHeaderTitle(mode), subtitle, headerMinWidth) } @@ -558,7 +558,7 @@ func auditInstalled(sourcePath, mode, projectRoot, threshold string, opts auditO } var progressBar *ui.ProgressBar if !jsonOutput { - progressBar = ui.StartProgress("Scanning skills", len(skillPaths)) + progressBar = ui.StartProgress(fmt.Sprintf("Scanning %s", kind.Noun(2)), len(skillPaths)) } onDone := func() { if progressBar != nil { @@ -588,6 +588,7 @@ func auditInstalled(sourcePath, mode, projectRoot, threshold string, opts auditO } sr.Result.Threshold = threshold sr.Result.IsBlocked = sr.Result.HasSeverityAtOrAbove(threshold) + sr.Result.Kind = kind.SingularNoun() // Use relative path so TUI shows group hierarchy (e.g. "frontend/vue/skill"). if rel, err := filepath.Rel(sourcePath, sr.Result.ScanTarget); err == nil && rel != sr.Result.SkillName { sr.Result.SkillName = rel @@ -608,14 +609,14 @@ func auditInstalled(sourcePath, mode, projectRoot, threshold string, opts auditO } applyPolicyToSummary(&summary, opts) - if err := presentAuditResults(results, elapsed, scanResults, summary, jsonOutput, opts, headerMinWidth); err != nil { + if err := presentAuditResults(results, elapsed, scanResults, summary, jsonOutput, opts, headerMinWidth, kind); err != nil { return results, summary, err } return results, summary, nil } -func auditFiltered(sourcePath string, names, groups []string, mode, projectRoot, threshold string, opts auditOptions, reg *audit.Registry) ([]*audit.Result, auditRunSummary, error) { +func auditFiltered(sourcePath string, names, groups []string, mode, projectRoot, threshold string, kind resourceKindFilter, opts auditOptions, reg *audit.Registry) ([]*audit.Result, auditRunSummary, error) { jsonOutput := opts.isStructured() base := auditRunSummary{ Scope: "filtered", @@ -672,7 +673,7 @@ func auditFiltered(sourcePath string, names, groups []string, mode, projectRoot, } for _, w := range warnings { if !jsonOutput { - ui.Warning("skill not found: %s", w) + ui.Warning("%s not found: %s", kind.SingularNoun(), w) } } @@ -684,7 +685,7 @@ func auditFiltered(sourcePath string, names, groups []string, mode, projectRoot, var headerMinWidth int if !jsonOutput { fmt.Println() - subtitle := auditHeaderSubtitle(fmt.Sprintf("Scanning %d skills for threats", len(matched)), mode, sourcePath, threshold, opts.PolicyLine) + subtitle := auditHeaderSubtitle(fmt.Sprintf("Scanning %d %s for threats", len(matched), kind.Noun(len(matched))), mode, sourcePath, threshold, opts.PolicyLine) headerMinWidth = auditHeaderMinWidth(subtitle) ui.HeaderBoxWithMinWidth(auditHeaderTitle(mode), subtitle, headerMinWidth) } @@ -695,7 +696,7 @@ func auditFiltered(sourcePath string, names, groups []string, mode, projectRoot, } var progressBar *ui.ProgressBar if !jsonOutput { - progressBar = ui.StartProgress("Scanning skills", len(matched)) + progressBar = ui.StartProgress(fmt.Sprintf("Scanning %s", kind.Noun(2)), len(matched)) } onDone := func() { if progressBar != nil { @@ -725,6 +726,7 @@ func auditFiltered(sourcePath string, names, groups []string, mode, projectRoot, } sr.Result.Threshold = threshold sr.Result.IsBlocked = sr.Result.HasSeverityAtOrAbove(threshold) + sr.Result.Kind = kind.SingularNoun() if rel, err := filepath.Rel(sourcePath, sr.Result.ScanTarget); err == nil && rel != sr.Result.SkillName { sr.Result.SkillName = rel } @@ -744,14 +746,14 @@ func auditFiltered(sourcePath string, names, groups []string, mode, projectRoot, } applyPolicyToSummary(&summary, opts) - if err := presentAuditResults(results, elapsed, scanResults, summary, jsonOutput, opts, headerMinWidth); err != nil { + if err := presentAuditResults(results, elapsed, scanResults, summary, jsonOutput, opts, headerMinWidth, kind); err != nil { return results, summary, err } return results, summary, nil } -func auditSkillByName(sourcePath, name, mode, projectRoot, threshold, format, policyLine string, reg *audit.Registry) ([]*audit.Result, auditRunSummary, error) { +func auditSkillByName(sourcePath, name, mode, projectRoot, threshold, format, policyLine string, kind resourceKindFilter, reg *audit.Registry) ([]*audit.Result, auditRunSummary, error) { summary := auditRunSummary{ Scope: "single", Skill: name, @@ -764,7 +766,7 @@ func auditSkillByName(sourcePath, name, mode, projectRoot, threshold, format, po // Short-name fallback: search installed skills by flat name or basename. resolved := resolveSkillPath(sourcePath, name) if resolved == "" { - return nil, summary, fmt.Errorf("skill not found: %s", name) + return nil, summary, fmt.Errorf("%s not found: %s", kind.SingularNoun(), name) } skillPath = resolved } @@ -777,6 +779,7 @@ func auditSkillByName(sourcePath, name, mode, projectRoot, threshold, format, po elapsed := time.Since(start) result.Threshold = threshold result.IsBlocked = result.HasSeverityAtOrAbove(threshold) + result.Kind = kind.SingularNoun() if rel, err := filepath.Rel(sourcePath, result.ScanTarget); err == nil && rel != result.SkillName { result.SkillName = rel } @@ -786,12 +789,9 @@ func auditSkillByName(sourcePath, name, mode, projectRoot, threshold, format, po summary.Skill = name summary.Mode = mode if format == formatText { - label := "skill" - if strings.HasSuffix(strings.ToLower(name), ".md") { - label = "agent" - } + label := kind.SingularNoun() subtitle := auditHeaderSubtitle(fmt.Sprintf("Scanning %s: %s", label, name), mode, sourcePath, threshold, policyLine) - summaryLines := buildAuditSummaryLines(summary) + summaryLines := buildAuditSummaryLines(summary, kind) minWidth := auditHeaderMinWidth(subtitle) ui.HeaderBoxWithMinWidth(auditHeaderTitle(mode), subtitle, minWidth) fmt.Println() diff --git a/cmd/skillshare/audit_project.go b/cmd/skillshare/audit_project.go index 0f714d56..f688d6b4 100644 --- a/cmd/skillshare/audit_project.go +++ b/cmd/skillshare/audit_project.go @@ -22,10 +22,10 @@ func cmdAuditProject(root, specificSkill string) (auditRunSummary, bool, error) } if specificSkill != "" { - _, summary, err := auditSkillByName(rt.sourcePath, specificSkill, "project", root, threshold, formatText, "", nil) + _, summary, err := auditSkillByName(rt.sourcePath, specificSkill, "project", root, threshold, formatText, "", kindSkills, nil) return summary, summary.Failed > 0, err } - _, summary, err := auditInstalled(rt.sourcePath, "project", root, threshold, auditOptions{}, nil) + _, summary, err := auditInstalled(rt.sourcePath, "project", root, threshold, kindSkills, auditOptions{}, nil) return summary, summary.Failed > 0, err } diff --git a/cmd/skillshare/audit_render.go b/cmd/skillshare/audit_render.go index 25eca17d..8c5c9c1b 100644 --- a/cmd/skillshare/audit_render.go +++ b/cmd/skillshare/audit_render.go @@ -21,7 +21,7 @@ func riskColor(label string) string { // presentAuditResults handles the common output path for audit scans: // prints per-skill list only when TUI is unavailable, always prints summary, // and launches TUI when conditions are met. -func presentAuditResults(results []*audit.Result, elapsed []time.Duration, scanOutputs []audit.ScanOutput, summary auditRunSummary, jsonOutput bool, opts auditOptions, headerMinWidth int) error { +func presentAuditResults(results []*audit.Result, elapsed []time.Duration, scanOutputs []audit.ScanOutput, summary auditRunSummary, jsonOutput bool, opts auditOptions, headerMinWidth int, kind ...resourceKindFilter) error { useTUI := !jsonOutput && shouldLaunchTUI(opts.NoTUI, nil) && len(results) > 1 if !jsonOutput { @@ -37,7 +37,7 @@ func presentAuditResults(results []*audit.Result, elapsed []time.Duration, scanO } fmt.Println() } - summaryLines := buildAuditSummaryLines(summary) + summaryLines := buildAuditSummaryLines(summary, kind...) printAuditSummary(summary, summaryLines, headerMinWidth) } @@ -155,7 +155,7 @@ func printSkillResult(result *audit.Result, elapsed time.Duration) { } // buildAuditSummaryLines builds the summary box lines (without printing). -func buildAuditSummaryLines(summary auditRunSummary) []string { +func buildAuditSummaryLines(summary auditRunSummary, kind ...resourceKindFilter) []string { var lines []string maxSeverity := summary.MaxSeverity if maxSeverity == "" { @@ -167,8 +167,12 @@ func buildAuditSummaryLines(summary auditRunSummary) []string { lines = append(lines, fmt.Sprintf(" Max sev: %s", ui.Colorize(ui.SeverityColor(maxSeverity), maxSeverity))) // -- Result counts -- + noun := "skill(s)" + if len(kind) > 0 { + noun = kind[0].Noun(summary.Scanned) + } lines = append(lines, "") - lines = append(lines, fmt.Sprintf(" Scanned: %d skill(s)", summary.Scanned)) + lines = append(lines, fmt.Sprintf(" Scanned: %d %s", summary.Scanned, noun)) lines = append(lines, fmt.Sprintf(" Passed: %d", summary.Passed)) if summary.Warning > 0 { lines = append(lines, fmt.Sprintf(" Warning: %s", ui.Colorize(ui.Yellow, fmt.Sprintf("%d", summary.Warning)))) From e1c94b5427d076fe119e7d8136f210f71b11c4a9 Mon Sep 17 00:00:00 2001 From: Willie Date: Thu, 9 Apr 2026 14:24:43 +0800 Subject: [PATCH 133/205] feat(server): add ?kind=agents param to audit API handlers MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Add resolveAuditSource() helper that returns source dir and result kind based on ?kind query param (agents → agentsSource(), default → skills) - Update handleAuditAll() to use resolveAuditSource() and set Kind on all results after processAuditResults() - Update handleAuditStream() with the same pattern for SSE endpoint - Update toAuditResponse() to propagate result.Kind into response JSON --- internal/server/handler_audit.go | 15 ++++++++++++++- internal/server/handler_audit_stream.go | 5 ++++- 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/internal/server/handler_audit.go b/internal/server/handler_audit.go index 9a7326bd..b5a39a2f 100644 --- a/internal/server/handler_audit.go +++ b/internal/server/handler_audit.go @@ -251,10 +251,19 @@ func processAuditResults(skills []skillEntry, scanned []audit.ScanOutput, policy } } +// resolveAuditSource returns the source directory and result kind based on ?kind query param. +func (s *Server) resolveAuditSource(r *http.Request) (string, string) { + kind := r.URL.Query().Get("kind") + if kind == "agents" { + return s.agentsSource(), "agent" + } + return s.cfg.Source, "skill" +} + func (s *Server) handleAuditAll(w http.ResponseWriter, r *http.Request) { // Snapshot config under RLock, then release before I/O. s.mu.RLock() - source := s.cfg.Source + source, resultKind := s.resolveAuditSource(r) policy := s.auditPolicy() projectRoot := s.projectRoot cfgPath := s.configPath() @@ -277,6 +286,9 @@ func (s *Server) handleAuditAll(w http.ResponseWriter, r *http.Request) { scanned := audit.ParallelScan(skillsToAuditInputs(skills), auditProjectRoot, nil, nil) agg := processAuditResults(skills, scanned, policy) + for i := range agg.Results { + agg.Results[i].Kind = resultKind + } writeAuditLogTo(cfgPath, agg.Status, start, agg.LogArgs, agg.Message) writeJSON(w, map[string]any{ @@ -674,6 +686,7 @@ func toAuditResponse(result *audit.Result) auditResultResponse { } return auditResultResponse{ SkillName: result.SkillName, + Kind: result.Kind, Findings: findings, RiskScore: result.RiskScore, RiskLabel: result.RiskLabel, diff --git a/internal/server/handler_audit_stream.go b/internal/server/handler_audit_stream.go index 668b92d2..fd1e2899 100644 --- a/internal/server/handler_audit_stream.go +++ b/internal/server/handler_audit_stream.go @@ -24,7 +24,7 @@ func (s *Server) handleAuditStream(w http.ResponseWriter, r *http.Request) { // Snapshot config under RLock, then release before slow I/O. s.mu.RLock() - source := s.cfg.Source + source, resultKind := s.resolveAuditSource(r) projectRoot := s.projectRoot policy := s.auditPolicy() s.mu.RUnlock() @@ -70,6 +70,9 @@ func (s *Server) handleAuditStream(w http.ResponseWriter, r *http.Request) { // 4. Process results agg := processAuditResults(skills, outputs, policy) + for i := range agg.Results { + agg.Results[i].Kind = resultKind + } s.writeAuditLog(agg.Status, start, agg.LogArgs, agg.Message) // 5. Send final result (no concurrent writers at this point) From 86461ecc7d110c3d2a09c9d45932a587657aab7d Mon Sep 17 00:00:00 2001 From: Willie Date: Thu, 9 Apr 2026 14:31:14 +0800 Subject: [PATCH 134/205] feat(audit): add Skills/Agents tab switching to TUI Two-tab TUI following list_tui.go pattern. Tab/Shift+Tab cycles between Skills and Agents tabs. Initial tab matches CLI argument. Secondary kind scanned lazily when TUI launches via auditTUIContext. Also adds agentsSourcePath param to auditInstalled/auditFiltered and launchAuditTUIWithTabs helper for dual-kind scanning. --- cmd/skillshare/audit.go | 30 ++++-- cmd/skillshare/audit_project.go | 2 +- cmd/skillshare/audit_render.go | 83 +++++++++++++++- cmd/skillshare/audit_tui.go | 169 +++++++++++++++++++++++++------- 4 files changed, 241 insertions(+), 43 deletions(-) diff --git a/cmd/skillshare/audit.go b/cmd/skillshare/audit.go index a5b8d9aa..5e56c490 100644 --- a/cmd/skillshare/audit.go +++ b/cmd/skillshare/audit.go @@ -244,13 +244,13 @@ func cmdAudit(args []string) error { switch { case !hasTargets: - results, summary, err = auditInstalled(sourcePath, modeString(mode), projectRoot, threshold, kind, opts, registry) + results, summary, err = auditInstalled(sourcePath, agentsSourcePath, modeString(mode), projectRoot, threshold, kind, opts, registry) case isSinglePath: results, summary, err = auditPath(opts.Targets[0], modeString(mode), projectRoot, threshold, opts.Format, opts.PolicyLine, registry) case isSingleName: results, summary, err = auditSkillByName(sourcePath, opts.Targets[0], modeString(mode), projectRoot, threshold, opts.Format, opts.PolicyLine, kind, registry) default: - results, summary, err = auditFiltered(sourcePath, opts.Targets, opts.Groups, modeString(mode), projectRoot, threshold, kind, opts, registry) + results, summary, err = auditFiltered(sourcePath, agentsSourcePath, opts.Targets, opts.Groups, modeString(mode), projectRoot, threshold, kind, opts, registry) } if err != nil { logAuditOp(cfgPath, rest, summary, start, err, false) @@ -501,7 +501,7 @@ func scanPathTarget(targetPath, projectRoot string, registry *audit.Registry) (* return audit.ScanFile(targetPath) } -func auditInstalled(sourcePath, mode, projectRoot, threshold string, kind resourceKindFilter, opts auditOptions, reg *audit.Registry) ([]*audit.Result, auditRunSummary, error) { +func auditInstalled(sourcePath, agentsSourcePath, mode, projectRoot, threshold string, kind resourceKindFilter, opts auditOptions, reg *audit.Registry) ([]*audit.Result, auditRunSummary, error) { jsonOutput := opts.isStructured() base := auditRunSummary{ Scope: "all", @@ -609,14 +609,23 @@ func auditInstalled(sourcePath, mode, projectRoot, threshold string, kind resour } applyPolicyToSummary(&summary, opts) - if err := presentAuditResults(results, elapsed, scanResults, summary, jsonOutput, opts, headerMinWidth, kind); err != nil { + tuiCtx := &auditTUIContext{ + kind: kind, + sourcePath: sourcePath, + agentsSourcePath: agentsSourcePath, + projectRoot: projectRoot, + threshold: threshold, + registry: reg, + mode: mode, + } + if err := presentAuditResults(results, elapsed, scanResults, summary, jsonOutput, opts, headerMinWidth, tuiCtx); err != nil { return results, summary, err } return results, summary, nil } -func auditFiltered(sourcePath string, names, groups []string, mode, projectRoot, threshold string, kind resourceKindFilter, opts auditOptions, reg *audit.Registry) ([]*audit.Result, auditRunSummary, error) { +func auditFiltered(sourcePath, agentsSourcePath string, names, groups []string, mode, projectRoot, threshold string, kind resourceKindFilter, opts auditOptions, reg *audit.Registry) ([]*audit.Result, auditRunSummary, error) { jsonOutput := opts.isStructured() base := auditRunSummary{ Scope: "filtered", @@ -746,7 +755,16 @@ func auditFiltered(sourcePath string, names, groups []string, mode, projectRoot, } applyPolicyToSummary(&summary, opts) - if err := presentAuditResults(results, elapsed, scanResults, summary, jsonOutput, opts, headerMinWidth, kind); err != nil { + tuiCtx := &auditTUIContext{ + kind: kind, + sourcePath: sourcePath, + agentsSourcePath: agentsSourcePath, + projectRoot: projectRoot, + threshold: threshold, + registry: reg, + mode: mode, + } + if err := presentAuditResults(results, elapsed, scanResults, summary, jsonOutput, opts, headerMinWidth, tuiCtx); err != nil { return results, summary, err } diff --git a/cmd/skillshare/audit_project.go b/cmd/skillshare/audit_project.go index f688d6b4..b649a7ff 100644 --- a/cmd/skillshare/audit_project.go +++ b/cmd/skillshare/audit_project.go @@ -26,6 +26,6 @@ func cmdAuditProject(root, specificSkill string) (auditRunSummary, bool, error) return summary, summary.Failed > 0, err } - _, summary, err := auditInstalled(rt.sourcePath, "project", root, threshold, kindSkills, auditOptions{}, nil) + _, summary, err := auditInstalled(rt.sourcePath, "", "project", root, threshold, kindSkills, auditOptions{}, nil) return summary, summary.Failed > 0, err } diff --git a/cmd/skillshare/audit_render.go b/cmd/skillshare/audit_render.go index 8c5c9c1b..6f9d08d0 100644 --- a/cmd/skillshare/audit_render.go +++ b/cmd/skillshare/audit_render.go @@ -2,6 +2,7 @@ package main import ( "fmt" + "path/filepath" "sort" "strings" "time" @@ -18,10 +19,21 @@ func riskColor(label string) string { return ui.Dim } +// auditTUIContext carries info needed to scan the "other" kind for TUI tab switching. +type auditTUIContext struct { + kind resourceKindFilter + sourcePath string // skills source (always) + agentsSourcePath string // agents source (always) + projectRoot string + threshold string + registry *audit.Registry + mode string +} + // presentAuditResults handles the common output path for audit scans: // prints per-skill list only when TUI is unavailable, always prints summary, // and launches TUI when conditions are met. -func presentAuditResults(results []*audit.Result, elapsed []time.Duration, scanOutputs []audit.ScanOutput, summary auditRunSummary, jsonOutput bool, opts auditOptions, headerMinWidth int, kind ...resourceKindFilter) error { +func presentAuditResults(results []*audit.Result, elapsed []time.Duration, scanOutputs []audit.ScanOutput, summary auditRunSummary, jsonOutput bool, opts auditOptions, headerMinWidth int, tuiCtx *auditTUIContext) error { useTUI := !jsonOutput && shouldLaunchTUI(opts.NoTUI, nil) && len(results) > 1 if !jsonOutput { @@ -37,16 +49,81 @@ func presentAuditResults(results []*audit.Result, elapsed []time.Duration, scanO } fmt.Println() } - summaryLines := buildAuditSummaryLines(summary, kind...) + var kindSlice []resourceKindFilter + if tuiCtx != nil { + kindSlice = []resourceKindFilter{tuiCtx.kind} + } + summaryLines := buildAuditSummaryLines(summary, kindSlice...) printAuditSummary(summary, summaryLines, headerMinWidth) } if useTUI { - return runAuditTUI(results, scanOutputs, summary) + if tuiCtx != nil { + return launchAuditTUIWithTabs(results, scanOutputs, summary, tuiCtx) + } + return runAuditTUI(results, scanOutputs, summary, nil, nil, auditRunSummary{}, auditTabSkills) } return nil } +func launchAuditTUIWithTabs(results []*audit.Result, scanOutputs []audit.ScanOutput, summary auditRunSummary, ctx *auditTUIContext) error { + initialTab := auditTabSkills + if ctx.kind == kindAgents { + initialTab = auditTabAgents + } + + // Scan the "other" kind for the second tab. + var otherResults []*audit.Result + var otherOutputs []audit.ScanOutput + var otherSummary auditRunSummary + + otherSource := ctx.agentsSourcePath + otherKind := "agent" + if ctx.kind == kindAgents { + otherSource = ctx.sourcePath + otherKind = "skill" + } + + if otherSource != "" { + otherPaths, err := collectInstalledSkillPaths(otherSource) + if err == nil && len(otherPaths) > 0 { + otherScanResults := audit.ParallelScan(toAuditInputs(otherPaths), ctx.projectRoot, nil, ctx.registry) + for i := range otherPaths { + if i < len(otherScanResults) { + sr := otherScanResults[i] + if sr.Err == nil { + sr.Result.Threshold = ctx.threshold + sr.Result.IsBlocked = sr.Result.HasSeverityAtOrAbove(ctx.threshold) + sr.Result.Kind = otherKind + if rel, relErr := filepath.Rel(otherSource, sr.Result.ScanTarget); relErr == nil { + sr.Result.SkillName = rel + } + otherResults = append(otherResults, sr.Result) + otherOutputs = append(otherOutputs, sr) + } + } + } + otherSummary = summarizeAuditResults(len(otherPaths), otherResults, ctx.threshold) + otherSummary.Mode = ctx.mode + } + } + + // Arrange into skills vs agents. + var skillResults, agentResults []*audit.Result + var skillOutputs, agentOutputs []audit.ScanOutput + var skillSummary, agentSummary auditRunSummary + + if ctx.kind == kindAgents { + agentResults, agentOutputs, agentSummary = results, scanOutputs, summary + skillResults, skillOutputs, skillSummary = otherResults, otherOutputs, otherSummary + } else { + skillResults, skillOutputs, skillSummary = results, scanOutputs, summary + agentResults, agentOutputs, agentSummary = otherResults, otherOutputs, otherSummary + } + + return runAuditTUI(skillResults, skillOutputs, skillSummary, agentResults, agentOutputs, agentSummary, initialTab) +} + // printSkillResultLine prints a single-line result for a skill during batch scan. func printSkillResultLine(index, total int, result *audit.Result, elapsed time.Duration) { prefix := fmt.Sprintf("[%d/%d]", index, total) diff --git a/cmd/skillshare/audit_tui.go b/cmd/skillshare/audit_tui.go index 1ca13cd7..75592292 100644 --- a/cmd/skillshare/audit_tui.go +++ b/cmd/skillshare/audit_tui.go @@ -15,6 +15,20 @@ import ( "github.com/charmbracelet/lipgloss" ) +type auditTab int + +const ( + auditTabSkills auditTab = iota + auditTabAgents +) + +func (t auditTab) noun() string { + if t == auditTabAgents { + return "agents" + } + return "skills" +} + // ac holds audit-specific styles that don't belong in the shared tc palette. var ac = struct { File lipgloss.Style // file:line locations — cyan @@ -38,6 +52,7 @@ func acSevCount(count int, style lipgloss.Style) lipgloss.Style { type auditItem struct { result *audit.Result elapsed time.Duration + kind string // "skill" or "agent" } func (i auditItem) Title() string { @@ -212,61 +227,83 @@ type auditTUIModel struct { termHeight int summary auditRunSummary + + // Tab switching (skills ↔ agents) + activeTab auditTab + skillItems []auditItem + agentItems []auditItem + skillSummary auditRunSummary + agentSummary auditRunSummary + tabCounts [2]int // [skills, agents] } -func newAuditTUIModel(results []*audit.Result, scanOutputs []audit.ScanOutput, summary auditRunSummary) auditTUIModel { - // Build items sorted: by severity (findings first), then by name. - items := make([]auditItem, 0, len(results)) - for idx, r := range results { - var elapsed time.Duration - if idx < len(scanOutputs) { - elapsed = scanOutputs[idx].Elapsed - } - items = append(items, auditItem{result: r, elapsed: elapsed}) - } +func sortAuditItems(items []auditItem) { sort.Slice(items, func(i, j int) bool { ri, rj := items[i].result, items[j].result - // Primary: group by repo key (tracked repos first, then standalone). ki, kj := auditRepoKey(ri.SkillName), auditRepoKey(rj.SkillName) if ki != kj { - // Both tracked: alphabetical if ki != "" && kj != "" { return ki < kj } - // Tracked before standalone return ki != "" } - // Secondary: skills with findings come first. hasI, hasJ := len(ri.Findings) > 0, len(rj.Findings) > 0 if hasI != hasJ { return hasI } if hasI && hasJ { - // Higher severity (lower rank) first. rankI := audit.SeverityRank(ri.MaxSeverity()) rankJ := audit.SeverityRank(rj.MaxSeverity()) if rankI != rankJ { return rankI < rankJ } - // Higher risk score first. if ri.RiskScore != rj.RiskScore { return ri.RiskScore > rj.RiskScore } } return ri.SkillName < rj.SkillName }) +} - // Cap items for list widget performance. - allItems := items - displayItems := items +func newAuditTUIModel( + skillResults []*audit.Result, skillOutputs []audit.ScanOutput, skillSummary auditRunSummary, + agentResults []*audit.Result, agentOutputs []audit.ScanOutput, agentSummary auditRunSummary, + initialTab auditTab, +) auditTUIModel { + buildItems := func(results []*audit.Result, outputs []audit.ScanOutput, kind string) []auditItem { + items := make([]auditItem, 0, len(results)) + for idx, r := range results { + var elapsed time.Duration + if idx < len(outputs) { + elapsed = outputs[idx].Elapsed + } + items = append(items, auditItem{result: r, elapsed: elapsed, kind: kind}) + } + sortAuditItems(items) + return items + } + + skillItems := buildItems(skillResults, skillOutputs, "skill") + agentItems := buildItems(agentResults, agentOutputs, "agent") + + var activeItems []auditItem + var activeSummary auditRunSummary + if initialTab == auditTabAgents { + activeItems = agentItems + activeSummary = agentSummary + } else { + activeItems = skillItems + activeSummary = skillSummary + } + + displayItems := activeItems if len(displayItems) > maxListItems { displayItems = displayItems[:maxListItems] } - listItems := buildGroupedAuditItems(displayItems) l := list.New(listItems, auditDelegate{}, 0, 0) - l.Title = fmt.Sprintf("Audit results (%d scanned)", summary.Scanned) + l.Title = fmt.Sprintf("Audit results (%d scanned)", activeSummary.Scanned) l.Styles.Title = tc.ListTitle l.SetShowStatusBar(false) l.SetFilteringEnabled(false) @@ -279,16 +316,38 @@ func newAuditTUIModel(results []*audit.Result, scanOutputs []audit.ScanOutput, s fi.Cursor.Style = tc.Filter m := auditTUIModel{ - list: l, - allItems: allItems, - matchCount: len(allItems), - filterInput: fi, - summary: summary, + list: l, + allItems: activeItems, + matchCount: len(activeItems), + filterInput: fi, + summary: activeSummary, + activeTab: initialTab, + skillItems: skillItems, + agentItems: agentItems, + skillSummary: skillSummary, + agentSummary: agentSummary, + tabCounts: [2]int{len(skillItems), len(agentItems)}, } skipGroupItem(&m.list, 1) return m } +func (m *auditTUIModel) switchTab() { + if m.activeTab == auditTabAgents { + m.allItems = m.agentItems + m.summary = m.agentSummary + } else { + m.allItems = m.skillItems + m.summary = m.skillSummary + } + m.filterText = "" + m.filterInput.SetValue("") + m.detailScroll = 0 + m.applyFilter() + m.list.Title = fmt.Sprintf("Audit results (%d scanned)", m.summary.Scanned) + skipGroupItem(&m.list, 1) +} + func (m auditTUIModel) Init() tea.Cmd { return nil } func (m *auditTUIModel) applyFilter() { @@ -391,6 +450,14 @@ func (m auditTUIModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) { m.detailScroll = 0 } return m, nil + case "tab": + m.activeTab = (m.activeTab + 1) % 2 + m.switchTab() + return m, nil + case "shift+tab": + m.activeTab = (m.activeTab - 1 + 2) % 2 + m.switchTab() + return m, nil } } @@ -426,6 +493,9 @@ func (m auditTUIModel) View() string { // ── Horizontal split layout ── var b strings.Builder + b.WriteString(m.renderTabBar()) + b.WriteString("\n") + panelHeight := m.auditPanelHeight() leftWidth := auditListWidth(m.termWidth) @@ -465,7 +535,7 @@ func (m auditTUIModel) View() string { b.WriteString(m.renderSummaryFooter()) // Help line - b.WriteString(tc.Help.Render(appendScrollInfo("↑↓ navigate ←→ page / filter Ctrl+d/u scroll detail q quit", scrollInfo))) + b.WriteString(tc.Help.Render(appendScrollInfo("Tab skills/agents ↑↓ navigate ←→ page / filter Ctrl+d/u scroll detail q quit", scrollInfo))) return b.String() } @@ -474,6 +544,9 @@ func (m auditTUIModel) View() string { func (m auditTUIModel) viewVertical() string { var b strings.Builder + b.WriteString(m.renderTabBar()) + b.WriteString("\n") + b.WriteString(m.list.View()) b.WriteString("\n\n") @@ -489,17 +562,43 @@ func (m auditTUIModel) viewVertical() string { b.WriteString(m.renderSummaryFooter()) - b.WriteString(tc.Help.Render(appendScrollInfo("↑↓ navigate ←→ page / filter Ctrl+d/u scroll q quit", scrollInfo))) + b.WriteString(tc.Help.Render(appendScrollInfo("Tab skills/agents ↑↓ navigate ←→ page / filter Ctrl+d/u scroll q quit", scrollInfo))) b.WriteString("\n") return b.String() } +func (m auditTUIModel) renderTabBar() string { + type tab struct { + label string + tab auditTab + count int + } + tabs := []tab{ + {"Skills", auditTabSkills, m.tabCounts[0]}, + {"Agents", auditTabAgents, m.tabCounts[1]}, + } + + activeStyle := lipgloss.NewStyle().Bold(true).Underline(true) + inactiveStyle := tc.Dim + + var parts []string + for _, t := range tabs { + label := fmt.Sprintf("%s(%d)", t.label, t.count) + if t.tab == m.activeTab { + parts = append(parts, activeStyle.Inherit(tc.Cyan).Render(label)) + } else { + parts = append(parts, inactiveStyle.Render(label)) + } + } + return " " + strings.Join(parts, " ") +} + func (m auditTUIModel) renderFilterBar() string { return renderTUIFilterBar( m.filterInput.View(), m.filtering, m.filterText, m.matchCount, len(m.allItems), maxListItems, - "results", m.renderPageInfo(), + m.activeTab.noun(), m.renderPageInfo(), ) } @@ -716,9 +815,9 @@ func (m auditTUIModel) renderDetailContent(item auditItem) string { } // auditFooterLines returns the number of lines the footer occupies below the panel. -// gap(2) + filter(1) + summary(1-2) + help(1) = 5 or 6 +// gap(2) + tab(1) + filter(1) + summary(1-2) + help(1) = 6 or 7 func (m auditTUIModel) auditFooterLines() int { - n := 5 // gap(2) + filter + summary-line1 + help + n := 6 // gap(2) + tab(1) + filter + summary-line1 + help if len(m.summary.ByCategory) > 0 { n++ // summary-line2 (threats) } @@ -803,8 +902,12 @@ func findingMetaTUI(f audit.Finding) string { } // runAuditTUI starts the bubbletea TUI for audit results. -func runAuditTUI(results []*audit.Result, scanOutputs []audit.ScanOutput, summary auditRunSummary) error { - model := newAuditTUIModel(results, scanOutputs, summary) +func runAuditTUI( + skillResults []*audit.Result, skillOutputs []audit.ScanOutput, skillSummary auditRunSummary, + agentResults []*audit.Result, agentOutputs []audit.ScanOutput, agentSummary auditRunSummary, + initialTab auditTab, +) error { + model := newAuditTUIModel(skillResults, skillOutputs, skillSummary, agentResults, agentOutputs, agentSummary, initialTab) p := tea.NewProgram(model, tea.WithAltScreen(), tea.WithMouseCellMotion()) _, err := p.Run() return err From 833fc86632155ad84927365fa3226346be89e0ed Mon Sep 17 00:00:00 2001 From: Willie Date: Thu, 9 Apr 2026 14:36:32 +0800 Subject: [PATCH 135/205] feat(ui): add Skills/Agents tab bar to audit page Tab bar switches between skills and agents audit scans. Each tab caches results independently. API calls include ?kind parameter. Display text dynamized for skill/agent context. --- ui/src/api/client.ts | 6 ++-- ui/src/pages/AuditPage.tsx | 56 +++++++++++++++++++++++++++++--------- 2 files changed, 47 insertions(+), 15 deletions(-) diff --git a/ui/src/api/client.ts b/ui/src/api/client.ts index 705437d2..7afba776 100644 --- a/ui/src/api/client.ts +++ b/ui/src/api/client.ts @@ -430,7 +430,8 @@ export const api = { }, // Audit - auditAll: () => apiFetch('/audit'), + auditAll: (kind?: 'skills' | 'agents') => + apiFetch(`/audit${kind ? '?kind=' + kind : ''}`), auditSkill: (name: string, kind?: 'skill' | 'agent') => apiFetch(`/audit/${encodeURIComponent(name)}${kind === 'agent' ? '?kind=agent' : ''}`), auditAllStream: ( @@ -438,8 +439,9 @@ export const api = { onProgress: (scanned: number) => void, onDone: (data: AuditAllResponse) => void, onError: (err: Error) => void, + kind?: 'skills' | 'agents', ): EventSource => - createSSEStream(BASE + '/audit/stream', { + createSSEStream(BASE + `/audit/stream${kind ? '?kind=' + kind : ''}`, { start: (d) => onStart(d.total), progress: (d) => onProgress(d.scanned), done: onDone, diff --git a/ui/src/pages/AuditPage.tsx b/ui/src/pages/AuditPage.tsx index 357d9d62..95241781 100644 --- a/ui/src/pages/AuditPage.tsx +++ b/ui/src/pages/AuditPage.tsx @@ -29,6 +29,7 @@ import ScrollToTop from '../components/ScrollToTop'; import KindBadge from '../components/KindBadge'; type SeverityFilter = 'CRITICAL' | 'HIGH' | 'MEDIUM' | 'LOW' | 'INFO'; +type AuditKind = 'skills' | 'agents'; const severityFilterOptions: { value: SeverityFilter; label: string }[] = [ { value: 'INFO', label: 'All (INFO+)' }, @@ -40,7 +41,12 @@ const severityFilterOptions: { value: SeverityFilter; label: string }[] = [ export default function AuditPage() { const { toast } = useToast(); - const [data, setData] = useState(null); + const [activeKind, setActiveKind] = useState('skills'); + const [dataCache, setDataCache] = useState>({ + skills: null, + agents: null, + }); + const data = dataCache[activeKind]; const [loading, setLoading] = useState(false); const [error, setError] = useState(null); const [minSeverity, setMinSeverity] = useState('MEDIUM'); @@ -83,16 +89,17 @@ export default function AuditPage() { const showAuditToast = useCallback((res: AuditAllResponse) => { const { summary } = res; + const noun = activeKind === 'agents' ? 'agent(s)' : 'skill(s)'; if (summary.failed > 0) { - toast(`Audit complete: ${summary.failed} skill(s) blocked at ${summary.threshold}+`, 'warning'); + toast(`Audit complete: ${summary.failed} ${noun} blocked at ${summary.threshold}+`, 'warning'); } else if (summary.warning > 0) { - toast(`Audit complete: ${summary.warning} skill(s) with warnings`, 'warning'); + toast(`Audit complete: ${summary.warning} ${noun} with warnings`, 'warning'); } else if (summary.low > 0 || summary.info > 0) { toast(`Audit complete: ${summary.low + summary.info} informational findings`, 'warning'); } else { - toast('Audit complete: all skills passed', 'success'); + toast(`Audit complete: all ${activeKind} passed`, 'success'); } - }, [toast]); + }, [toast, activeKind]); const runAudit = () => { setLoading(true); @@ -104,7 +111,7 @@ export default function AuditPage() { (total) => setProgress({ scanned: 0, total }), (scanned) => setProgress((p) => p ? { ...p, scanned } : null), (res) => { - setData(res); + setDataCache((prev) => ({ ...prev, [activeKind]: res })); setLoading(false); setProgress(null); showAuditToast(res); @@ -115,6 +122,7 @@ export default function AuditPage() { setProgress(null); toast(err.message, 'error'); }, + activeKind, ); }; @@ -148,6 +156,28 @@ export default function AuditPage() { />
+ {/* Kind tabs */} +
+ {(['skills', 'agents'] as const).map((kind) => ( + + ))} +
+ {/* Loading / Progress */} {loading && ( )} @@ -193,7 +223,7 @@ export default function AuditPage() { {totalFindings === 0 ? ( ) : filteredResults.length === 0 ? ( @@ -223,7 +253,7 @@ export default function AuditPage() { - {data.summary.passed} skill{data.summary.passed !== 1 ? 's' : ''} passed with no issues + {data.summary.passed} {activeKind === 'agents' ? 'agent' : 'skill'}{data.summary.passed !== 1 ? 's' : ''} passed with no issues
@@ -235,8 +265,8 @@ export default function AuditPage() { {!data && !loading && !error && ( Run Audit From 26c50503473d66a38a1777dbc856f5e34b751c39 Mon Sep 17 00:00:00 2001 From: Willie Date: Thu, 9 Apr 2026 14:37:53 +0800 Subject: [PATCH 136/205] test(audit): verify agents/skills audit uses correct terminology TestAudit_AgentsTerminology: ensures 'audit agents' says agent, not skill TestAudit_SkillsTerminology: ensures default audit says skill, not agent --- tests/integration/audit_test.go | 37 +++++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/tests/integration/audit_test.go b/tests/integration/audit_test.go index f344847e..b8bd59e7 100644 --- a/tests/integration/audit_test.go +++ b/tests/integration/audit_test.go @@ -1558,3 +1558,40 @@ func TestAudit_ProfileInConfig(t *testing.T) { t.Fatalf("expected policyDedupe=global from strict profile default, got %s", payload.Summary.PolicyDedupe) } } + +func TestAudit_AgentsTerminology(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + // Create an agent in the agents source directory. + agentsDir := filepath.Join(sb.Home, ".config", "skillshare", "agents") + agentDir := filepath.Join(agentsDir, "test-agent") + os.MkdirAll(agentDir, 0o755) + os.WriteFile(filepath.Join(agentDir, "agent.md"), []byte("# Test Agent\nA safe agent."), 0o644) + + sb.WriteConfig("source: " + sb.SourcePath + "\nagents_source: " + agentsDir + "\ntargets: {}\n") + + result := sb.RunCLI("audit", "agents", "--no-tui") + result.AssertSuccess(t) + // Output should use "agent" terminology, not "skill" + result.AssertAnyOutputContains(t, "agent") + result.AssertOutputNotContains(t, "skill(s)") + result.AssertOutputNotContains(t, "Scanned: 1 skill") +} + +func TestAudit_SkillsTerminology(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + sb.CreateSkill("my-skill", map[string]string{ + "SKILL.md": "---\nname: my-skill\n---\n# Safe skill", + }) + sb.WriteConfig("source: " + sb.SourcePath + "\ntargets: {}\n") + + result := sb.RunCLI("audit", "--no-tui") + result.AssertSuccess(t) + // Default audit should use "skill" terminology + result.AssertAnyOutputContains(t, "skill") + result.AssertOutputNotContains(t, "agent(s)") + result.AssertOutputNotContains(t, "Scanned: 1 agent") +} From 1b9ba787195ace311879e5bff1ba0a38c0eb3598 Mon Sep 17 00:00:00 2001 From: Willie Date: Thu, 9 Apr 2026 14:45:24 +0800 Subject: [PATCH 137/205] fix(audit): scan agents as individual files, not directories MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Agents are individual .md files discovered by resource.AgentKind{}.Discover(), but audit was using collectInstalledSkillPaths() which calls sync.DiscoverSourceSkillsLite() — designed for skill directories. Then ParallelScan() called ScanSkill() which expects directories, not files. Changes: - Add IsFile flag to audit.SkillInput; when set, ParallelScan uses ScanFile/ScanFileForProject instead of ScanSkill variants - Add collectInstalledAgentPaths() using resource.AgentKind{}.Discover() and resource.ActiveAgents() for proper agent file discovery - Add toAgentAuditInputs() helper that sets IsFile=true - Branch on kindAgents in auditInstalled(), auditFiltered(), and launchAuditTUIWithTabs() to use agent-specific discovery and inputs - Add discoverAuditAgents() in server handler and update resolveAuditSource() to return isAgents flag - Update handleAuditAll, handleAuditStream to use agent-aware discovery and build SkillInput with IsFile=true for agents --- cmd/skillshare/audit.go | 61 +++++++++++++++++++++++-- cmd/skillshare/audit_render.go | 16 ++++++- internal/audit/parallel.go | 26 +++++++---- internal/server/handler_audit.go | 42 ++++++++++++++--- internal/server/handler_audit_stream.go | 23 ++++++++-- 5 files changed, 141 insertions(+), 27 deletions(-) diff --git a/cmd/skillshare/audit.go b/cmd/skillshare/audit.go index 5e56c490..dba6eea5 100644 --- a/cmd/skillshare/audit.go +++ b/cmd/skillshare/audit.go @@ -11,6 +11,7 @@ import ( "skillshare/internal/audit" "skillshare/internal/config" "skillshare/internal/oplog" + "skillshare/internal/resource" "skillshare/internal/sync" "skillshare/internal/ui" "skillshare/internal/utils" @@ -487,6 +488,32 @@ func toAuditInputs(skills []auditSkillRef) []audit.SkillInput { return inputs } +func toAgentAuditInputs(agents []auditSkillRef) []audit.SkillInput { + inputs := make([]audit.SkillInput, len(agents)) + for i, a := range agents { + inputs[i] = audit.SkillInput{Name: a.name, Path: a.path, IsFile: true} + } + return inputs +} + +func collectInstalledAgentPaths(agentsSourcePath string) ([]auditSkillRef, error) { + if agentsSourcePath == "" { + return nil, nil + } + if _, err := os.Stat(agentsSourcePath); os.IsNotExist(err) { + return nil, nil + } + discovered, err := resource.AgentKind{}.Discover(agentsSourcePath) + if err != nil { + return nil, fmt.Errorf("failed to discover agents: %w", err) + } + var agentPaths []auditSkillRef + for _, d := range resource.ActiveAgents(discovered) { + agentPaths = append(agentPaths, auditSkillRef{name: d.FlatName, path: d.AbsPath}) + } + return agentPaths, nil +} + func scanPathTarget(targetPath, projectRoot string, registry *audit.Registry) (*audit.Result, error) { info, err := os.Stat(targetPath) if err != nil { @@ -509,12 +536,18 @@ func auditInstalled(sourcePath, agentsSourcePath, mode, projectRoot, threshold s Threshold: threshold, } - // Phase 0: discover skills. + // Phase 0: discover skills/agents. var spinner *ui.Spinner if !jsonOutput { spinner = ui.StartSpinner(fmt.Sprintf("Discovering %s...", kind.Noun(2))) } - skillPaths, err := collectInstalledSkillPaths(sourcePath) + var skillPaths []auditSkillRef + var err error + if kind == kindAgents { + skillPaths, err = collectInstalledAgentPaths(sourcePath) + } else { + skillPaths, err = collectInstalledSkillPaths(sourcePath) + } if err != nil { if spinner != nil { spinner.Fail("Discovery failed") @@ -565,7 +598,13 @@ func auditInstalled(sourcePath, agentsSourcePath, mode, projectRoot, threshold s progressBar.Increment() } } - scanResults := audit.ParallelScan(toAuditInputs(skillPaths), projectRoot, onDone, reg) + var scanInputs []audit.SkillInput + if kind == kindAgents { + scanInputs = toAgentAuditInputs(skillPaths) + } else { + scanInputs = toAuditInputs(skillPaths) + } + scanResults := audit.ParallelScan(scanInputs, projectRoot, onDone, reg) if progressBar != nil { progressBar.Stop() } @@ -633,7 +672,13 @@ func auditFiltered(sourcePath, agentsSourcePath string, names, groups []string, Threshold: threshold, } - allSkills, err := collectInstalledSkillPaths(sourcePath) + var allSkills []auditSkillRef + var err error + if kind == kindAgents { + allSkills, err = collectInstalledAgentPaths(sourcePath) + } else { + allSkills, err = collectInstalledSkillPaths(sourcePath) + } if err != nil { return nil, base, err } @@ -712,7 +757,13 @@ func auditFiltered(sourcePath, agentsSourcePath string, names, groups []string, progressBar.Increment() } } - scanResults := audit.ParallelScan(toAuditInputs(matched), projectRoot, onDone, reg) + var scanInputs []audit.SkillInput + if kind == kindAgents { + scanInputs = toAgentAuditInputs(matched) + } else { + scanInputs = toAuditInputs(matched) + } + scanResults := audit.ParallelScan(scanInputs, projectRoot, onDone, reg) if progressBar != nil { progressBar.Stop() } diff --git a/cmd/skillshare/audit_render.go b/cmd/skillshare/audit_render.go index 6f9d08d0..be7311df 100644 --- a/cmd/skillshare/audit_render.go +++ b/cmd/skillshare/audit_render.go @@ -85,9 +85,21 @@ func launchAuditTUIWithTabs(results []*audit.Result, scanOutputs []audit.ScanOut } if otherSource != "" { - otherPaths, err := collectInstalledSkillPaths(otherSource) + var otherPaths []auditSkillRef + var err error + if otherKind == "agent" { + otherPaths, err = collectInstalledAgentPaths(otherSource) + } else { + otherPaths, err = collectInstalledSkillPaths(otherSource) + } + var otherInputs []audit.SkillInput + if otherKind == "agent" { + otherInputs = toAgentAuditInputs(otherPaths) + } else { + otherInputs = toAuditInputs(otherPaths) + } if err == nil && len(otherPaths) > 0 { - otherScanResults := audit.ParallelScan(toAuditInputs(otherPaths), ctx.projectRoot, nil, ctx.registry) + otherScanResults := audit.ParallelScan(otherInputs, ctx.projectRoot, nil, ctx.registry) for i := range otherPaths { if i < len(otherScanResults) { sr := otherScanResults[i] diff --git a/internal/audit/parallel.go b/internal/audit/parallel.go index 43630f68..134a6054 100644 --- a/internal/audit/parallel.go +++ b/internal/audit/parallel.go @@ -21,8 +21,9 @@ func workerCount() int { // SkillInput describes a skill to scan. type SkillInput struct { - Name string - Path string + Name string + Path string + IsFile bool // true for individual file scanning (agents) } // ScanOutput holds the result of scanning a single skill. @@ -48,23 +49,30 @@ func ParallelScan(skills []SkillInput, projectRoot string, onDone func(), regist for i, sk := range skills { wg.Add(1) sem <- struct{}{} - go func(idx int, path string) { + go func(idx int, input SkillInput) { defer wg.Done() defer func() { <-sem }() start := time.Now() var res *Result var err error - if registry != nil { + if input.IsFile { + // Agent: scan individual file if projectRoot != "" { - res, err = ScanSkillFilteredForProject(path, projectRoot, registry) + res, err = ScanFileForProject(input.Path, projectRoot) } else { - res, err = ScanSkillFiltered(path, registry) + res, err = ScanFile(input.Path) + } + } else if registry != nil { + if projectRoot != "" { + res, err = ScanSkillFilteredForProject(input.Path, projectRoot, registry) + } else { + res, err = ScanSkillFiltered(input.Path, registry) } } else { if projectRoot != "" { - res, err = ScanSkillForProject(path, projectRoot) + res, err = ScanSkillForProject(input.Path, projectRoot) } else { - res, err = ScanSkill(path) + res, err = ScanSkill(input.Path) } } outputs[idx] = ScanOutput{ @@ -75,7 +83,7 @@ func ParallelScan(skills []SkillInput, projectRoot string, onDone func(), regist if onDone != nil { onDone() } - }(i, sk.Path) + }(i, sk) } wg.Wait() diff --git a/internal/server/handler_audit.go b/internal/server/handler_audit.go index b5a39a2f..42a399fc 100644 --- a/internal/server/handler_audit.go +++ b/internal/server/handler_audit.go @@ -69,6 +69,19 @@ type skillEntry struct { path string } +// discoverAuditAgents discovers agents (individual .md files) for audit scanning. +func discoverAuditAgents(source string) ([]skillEntry, error) { + discovered, err := resource.AgentKind{}.Discover(source) + if err != nil { + return nil, err + } + var agents []skillEntry + for _, d := range resource.ActiveAgents(discovered) { + agents = append(agents, skillEntry{name: d.FlatName, path: d.AbsPath}) + } + return agents, nil +} + // discoverAuditSkills discovers and deduplicates skills for audit scanning. func discoverAuditSkills(source string) ([]skillEntry, error) { discovered, err := sync.DiscoverSourceSkills(source) @@ -251,19 +264,19 @@ func processAuditResults(skills []skillEntry, scanned []audit.ScanOutput, policy } } -// resolveAuditSource returns the source directory and result kind based on ?kind query param. -func (s *Server) resolveAuditSource(r *http.Request) (string, string) { +// resolveAuditSource returns the source directory, result kind label, and whether agents are being scanned. +func (s *Server) resolveAuditSource(r *http.Request) (string, string, bool) { kind := r.URL.Query().Get("kind") if kind == "agents" { - return s.agentsSource(), "agent" + return s.agentsSource(), "agent", true } - return s.cfg.Source, "skill" + return s.cfg.Source, "skill", false } func (s *Server) handleAuditAll(w http.ResponseWriter, r *http.Request) { // Snapshot config under RLock, then release before I/O. s.mu.RLock() - source, resultKind := s.resolveAuditSource(r) + source, resultKind, isAgents := s.resolveAuditSource(r) policy := s.auditPolicy() projectRoot := s.projectRoot cfgPath := s.configPath() @@ -273,7 +286,13 @@ func (s *Server) handleAuditAll(w http.ResponseWriter, r *http.Request) { start := time.Now() - skills, err := discoverAuditSkills(source) + var skills []skillEntry + var err error + if isAgents { + skills, err = discoverAuditAgents(source) + } else { + skills, err = discoverAuditSkills(source) + } if err != nil { writeError(w, http.StatusInternalServerError, err.Error()) return @@ -283,7 +302,16 @@ func (s *Server) handleAuditAll(w http.ResponseWriter, r *http.Request) { if !isProjectMode { auditProjectRoot = "" } - scanned := audit.ParallelScan(skillsToAuditInputs(skills), auditProjectRoot, nil, nil) + var inputs []audit.SkillInput + if isAgents { + inputs = make([]audit.SkillInput, len(skills)) + for i, s := range skills { + inputs[i] = audit.SkillInput{Name: s.name, Path: s.path, IsFile: true} + } + } else { + inputs = skillsToAuditInputs(skills) + } + scanned := audit.ParallelScan(inputs, auditProjectRoot, nil, nil) agg := processAuditResults(skills, scanned, policy) for i := range agg.Results { diff --git a/internal/server/handler_audit_stream.go b/internal/server/handler_audit_stream.go index fd1e2899..2482007d 100644 --- a/internal/server/handler_audit_stream.go +++ b/internal/server/handler_audit_stream.go @@ -24,13 +24,19 @@ func (s *Server) handleAuditStream(w http.ResponseWriter, r *http.Request) { // Snapshot config under RLock, then release before slow I/O. s.mu.RLock() - source, resultKind := s.resolveAuditSource(r) + source, resultKind, isAgents := s.resolveAuditSource(r) projectRoot := s.projectRoot policy := s.auditPolicy() s.mu.RUnlock() - // 1. Discover skills - skills, err := discoverAuditSkills(source) + // 1. Discover skills/agents + var skills []skillEntry + var err error + if isAgents { + skills, err = discoverAuditAgents(source) + } else { + skills, err = discoverAuditSkills(source) + } if err != nil { safeSend("error", map[string]string{"error": err.Error()}) return @@ -64,7 +70,16 @@ func (s *Server) handleAuditStream(w http.ResponseWriter, r *http.Request) { onDone := func() { scanned.Add(1) } // 3. Parallel scan (blocks until all skills are scanned) - outputs := audit.ParallelScan(skillsToAuditInputs(skills), projectRoot, onDone, nil) + var inputs []audit.SkillInput + if isAgents { + inputs = make([]audit.SkillInput, len(skills)) + for i, s := range skills { + inputs[i] = audit.SkillInput{Name: s.name, Path: s.path, IsFile: true} + } + } else { + inputs = skillsToAuditInputs(skills) + } + outputs := audit.ParallelScan(inputs, projectRoot, onDone, nil) close(done) // signal ticker goroutine to stop wg.Wait() // wait for it to fully exit before writing to w From 353f0cb06850c5cb36d3a8fc21c5d0582de8e463 Mon Sep 17 00:00:00 2001 From: Willie Date: Thu, 9 Apr 2026 14:46:02 +0800 Subject: [PATCH 138/205] fix(ui): match audit tab style to resources page Replace the button-style tabs (filled bg, border-2) in AuditPage with the underline+icon+badge style used by ResourcesPage, TrashPage, and BatchUninstallPage for visual consistency across tabbed pages. - Use nav with role=tablist and aria-selected for accessibility - Add Puzzle/Bot icons matching ResourcesPage tabs - Show count badge only when data is available (tab.count != null) - Apply ss-resource-tab class for shared underline transition style --- ui/src/pages/AuditPage.tsx | 45 ++++++++++++++++++++++++++------------ 1 file changed, 31 insertions(+), 14 deletions(-) diff --git a/ui/src/pages/AuditPage.tsx b/ui/src/pages/AuditPage.tsx index 95241781..f1ae663b 100644 --- a/ui/src/pages/AuditPage.tsx +++ b/ui/src/pages/AuditPage.tsx @@ -11,6 +11,8 @@ import { CircleCheck, Gauge, Eye, + Puzzle, + Bot, } from 'lucide-react'; import { api } from '../api/client'; import type { AuditAllResponse, AuditResult, AuditFinding } from '../api/client'; @@ -157,26 +159,41 @@ export default function AuditPage() {
{/* Kind tabs */} -
- {(['skills', 'agents'] as const).map((kind) => ( +
+ {/* Loading / Progress */} {loading && ( From 40347477d5b6fbb0ea1216e68f11a61a4df20002 Mon Sep 17 00:00:00 2001 From: Willie Date: Thu, 9 Apr 2026 14:49:24 +0800 Subject: [PATCH 139/205] fix(audit): scan all agents including disabled ones MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Security audit should scan ALL agents regardless of .agentignore status — disabled agents may still contain threats. --- cmd/skillshare/audit.go | 3 ++- internal/server/handler_audit.go | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/cmd/skillshare/audit.go b/cmd/skillshare/audit.go index dba6eea5..64c69806 100644 --- a/cmd/skillshare/audit.go +++ b/cmd/skillshare/audit.go @@ -508,7 +508,8 @@ func collectInstalledAgentPaths(agentsSourcePath string) ([]auditSkillRef, error return nil, fmt.Errorf("failed to discover agents: %w", err) } var agentPaths []auditSkillRef - for _, d := range resource.ActiveAgents(discovered) { + // Audit scans ALL agents including disabled ones — security risks don't respect .agentignore. + for _, d := range discovered { agentPaths = append(agentPaths, auditSkillRef{name: d.FlatName, path: d.AbsPath}) } return agentPaths, nil diff --git a/internal/server/handler_audit.go b/internal/server/handler_audit.go index 42a399fc..9c7ec090 100644 --- a/internal/server/handler_audit.go +++ b/internal/server/handler_audit.go @@ -76,7 +76,7 @@ func discoverAuditAgents(source string) ([]skillEntry, error) { return nil, err } var agents []skillEntry - for _, d := range resource.ActiveAgents(discovered) { + for _, d := range discovered { agents = append(agents, skillEntry{name: d.FlatName, path: d.AbsPath}) } return agents, nil From 91eabf336912fc5cc9c42523bd5ada73ddbe00a8 Mon Sep 17 00:00:00 2001 From: Willie Date: Thu, 9 Apr 2026 15:02:46 +0800 Subject: [PATCH 140/205] feat(ui): cache audit results with React Query SSE scan results are written to query cache on completion. Navigating away and back shows cached results instead of requiring a re-scan. Cache expires after 5 minutes (staleTimes.audit). Per-kind cache keys ensure skills and agents results are independent. --- ui/src/lib/queryKeys.ts | 3 ++- ui/src/pages/AuditPage.tsx | 25 +++++++++++++++++++------ 2 files changed, 21 insertions(+), 7 deletions(-) diff --git a/ui/src/lib/queryKeys.ts b/ui/src/lib/queryKeys.ts index 603e49c3..6aeea767 100644 --- a/ui/src/lib/queryKeys.ts +++ b/ui/src/lib/queryKeys.ts @@ -21,7 +21,7 @@ export const queryKeys = { gitBranches: ['git-branches'] as const, audit: { - all: ['audit'] as const, + all: (kind?: string) => ['audit', kind ?? 'skills'] as const, skill: (name: string) => ['audit', 'skill', name] as const, rules: ['audit', 'rules'] as const, compiled: ['audit', 'rules', 'compiled'] as const, @@ -56,6 +56,7 @@ export const staleTimes = { auditRules: 5 * 60 * 1000, // 5min backups: 2 * 60 * 1000, // 2min trash: 2 * 60 * 1000, // 2min + audit: 5 * 60 * 1000, // 5min — full audit scan, expensive auditSkill: 5 * 60 * 1000, // 5min — per-skill audit, rarely changes check: 60 * 1000, // 1min syncMatrix: 30 * 1000, // 30s — changes after filter edits diff --git a/ui/src/pages/AuditPage.tsx b/ui/src/pages/AuditPage.tsx index f1ae663b..7cc46bde 100644 --- a/ui/src/pages/AuditPage.tsx +++ b/ui/src/pages/AuditPage.tsx @@ -14,6 +14,7 @@ import { Puzzle, Bot, } from 'lucide-react'; +import { useQueryClient } from '@tanstack/react-query'; import { api } from '../api/client'; import type { AuditAllResponse, AuditResult, AuditFinding } from '../api/client'; import Card from '../components/Card'; @@ -29,6 +30,7 @@ import { severityBadgeVariant } from '../lib/severity'; import { BlockStamp, RiskMeter, riskColor, riskBgColor } from '../components/audit'; import ScrollToTop from '../components/ScrollToTop'; import KindBadge from '../components/KindBadge'; +import { queryKeys, staleTimes } from '../lib/queryKeys'; type SeverityFilter = 'CRITICAL' | 'HIGH' | 'MEDIUM' | 'LOW' | 'INFO'; type AuditKind = 'skills' | 'agents'; @@ -43,18 +45,28 @@ const severityFilterOptions: { value: SeverityFilter; label: string }[] = [ export default function AuditPage() { const { toast } = useToast(); + const queryClient = useQueryClient(); const [activeKind, setActiveKind] = useState('skills'); - const [dataCache, setDataCache] = useState>({ - skills: null, - agents: null, - }); - const data = dataCache[activeKind]; const [loading, setLoading] = useState(false); const [error, setError] = useState(null); const [minSeverity, setMinSeverity] = useState('MEDIUM'); const [progress, setProgress] = useState<{ scanned: number; total: number } | null>(null); const esRef = useRef(null); const startTimeRef = useRef(0); + // Bump to trigger re-render after writing to query cache + const [, setCacheTick] = useState(0); + + // Read cached audit results per kind from React Query cache. + // Cache survives page navigation; stale after staleTimes.audit (5min). + const getCached = (kind: AuditKind): AuditAllResponse | null => { + const state = queryClient.getQueryState(queryKeys.audit.all(kind)); + if (!state || state.dataUpdatedAt === 0) return null; + // Respect stale time — don't show data older than threshold + if (Date.now() - state.dataUpdatedAt > staleTimes.audit) return null; + return queryClient.getQueryData(queryKeys.audit.all(kind)) ?? null; + }; + const dataCache = { skills: getCached('skills'), agents: getCached('agents') }; + const data = dataCache[activeKind]; // Clean up EventSource on unmount useEffect(() => { @@ -113,7 +125,8 @@ export default function AuditPage() { (total) => setProgress({ scanned: 0, total }), (scanned) => setProgress((p) => p ? { ...p, scanned } : null), (res) => { - setDataCache((prev) => ({ ...prev, [activeKind]: res })); + queryClient.setQueryData(queryKeys.audit.all(activeKind), res); + setCacheTick((n) => n + 1); setLoading(false); setProgress(null); showAuditToast(res); From 01a6f662c6cc6472aa9bf59bd1f9b82fc98fde8a Mon Sep 17 00:00:00 2001 From: Willie Date: Thu, 9 Apr 2026 15:13:29 +0800 Subject: [PATCH 141/205] fix(ui): exclude _cross-skill from audit card count and show separately Cross-skill analysis is a synthetic result, not an actual scanned resource. Separating it fixes the mismatch between summary counts (e.g. '4 warnings') and visible card count (was showing 5 cards). Now shown as a distinct 'Cross-resource analysis' section. --- ui/src/pages/AuditPage.tsx | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/ui/src/pages/AuditPage.tsx b/ui/src/pages/AuditPage.tsx index 7cc46bde..b9b71ab6 100644 --- a/ui/src/pages/AuditPage.tsx +++ b/ui/src/pages/AuditPage.tsx @@ -75,15 +75,19 @@ export default function AuditPage() { }; }, []); - const totalFindings = useMemo(() => { - if (!data) return 0; - return data.results.reduce((sum, result) => sum + result.findings.length, 0); + // Exclude synthetic _cross-skill result from real scan results. + // Cross-skill analysis is a derived insight, not an actual scanned resource. + const realResults = useMemo(() => { + if (!data) return []; + return data.results.filter((r) => r.skillName !== '_cross-skill'); }, [data]); - const filteredResults = useMemo(() => { - if (!data) return []; + const totalFindings = useMemo(() => { + return realResults.reduce((sum, result) => sum + result.findings.length, 0); + }, [realResults]); - return data.results + const filteredResults = useMemo(() => { + return realResults .map((result) => ({ ...result, findings: result.findings.filter((finding) => isSeverityAtOrAbove(finding.severity, minSeverity)), @@ -94,7 +98,7 @@ export default function AuditPage() { if (bySeverity !== 0) return bySeverity; return b.riskScore - a.riskScore; }); - }, [data, minSeverity]); + }, [realResults, minSeverity]); const visibleFindings = useMemo( () => filteredResults.reduce((sum, result) => sum + result.findings.length, 0), From e1c6549129bd62d637a31b6834d46bb29b045a7f Mon Sep 17 00:00:00 2001 From: Willie Date: Thu, 9 Apr 2026 15:16:18 +0800 Subject: [PATCH 142/205] fix(ui): fix audit summary mixing agent counts with finding counts Summary line now shows only consistent metrics: - scanned/passed/blocked/with findings (all agent-level) - total finding count (clearly labeled) Removed low/info severity counts that were confusing because they counted findings while adjacent numbers counted agents. --- ui/src/pages/AuditPage.tsx | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/ui/src/pages/AuditPage.tsx b/ui/src/pages/AuditPage.tsx index b9b71ab6..863999f2 100644 --- a/ui/src/pages/AuditPage.tsx +++ b/ui/src/pages/AuditPage.tsx @@ -322,21 +322,10 @@ function AuditSummaryLine({ summary }: { summary: AuditAllResponse['summary'] }) return (

{summary.total} scanned - {summary.passed > 0 && ( - <>{' · '}{summary.passed} passed - )} + {' · '}{summary.passed} passed {summary.failed > 0 && ( <>{' · '}{summary.failed} blocked )} - {summary.warning > 0 && ( - <>{' · '}{summary.warning} warnings - )} - {summary.low > 0 && ( - <>{' · '}{summary.low} low - )} - {summary.info > 0 && ( - <>{' · '}{summary.info} info - )}

); } From 12490620b033ebd28cfd28e74af934964263558a Mon Sep 17 00:00:00 2001 From: Willie Date: Thu, 9 Apr 2026 15:27:35 +0800 Subject: [PATCH 143/205] fix(ui): audit card icon color follows max severity MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Icon/border color now maps to the highest severity finding in the card: CRITICAL → red, HIGH/MEDIUM → orange, LOW → blue, INFO → gray. Previously all non-blocked cards used the same orange regardless of severity, making it impossible to gauge severity at a glance. --- ui/src/pages/AuditPage.tsx | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/ui/src/pages/AuditPage.tsx b/ui/src/pages/AuditPage.tsx index 863999f2..4e207f95 100644 --- a/ui/src/pages/AuditPage.tsx +++ b/ui/src/pages/AuditPage.tsx @@ -515,6 +515,8 @@ function TriagePanel({ function SkillAuditCard({ result }: { result: AuditResult; index?: number }) { const maxSeverity = getMaxSeverity(result.findings); + const iconColor = riskColor(result.riskLabel); + const iconBg = riskBgColor(result.riskLabel); return ( @@ -526,14 +528,8 @@ function SkillAuditCard({ result }: { result: AuditResult; index?: number }) { {/* Left: skill icon + name + issue count */}
{result.isBlocked ? ( From 486f13513ea57f36bd2215c13dde1fc9ccdfd2b7 Mon Sep 17 00:00:00 2001 From: Willie Date: Thu, 9 Apr 2026 15:35:45 +0800 Subject: [PATCH 144/205] fix(audit): exclude _cross-skill from TUI tab counts Filter out synthetic _cross-skill result before passing to TUI. Fixes Skills(7) showing when only 6 real skills were scanned. --- cmd/skillshare/audit_render.go | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/cmd/skillshare/audit_render.go b/cmd/skillshare/audit_render.go index be7311df..1ec224ed 100644 --- a/cmd/skillshare/audit_render.go +++ b/cmd/skillshare/audit_render.go @@ -120,16 +120,28 @@ func launchAuditTUIWithTabs(results []*audit.Result, scanOutputs []audit.ScanOut } } + // Filter out synthetic _cross-skill result — it's not a real resource. + var filteredResults []*audit.Result + var filteredOutputs []audit.ScanOutput + for i, r := range results { + if r.SkillName != "_cross-skill" { + filteredResults = append(filteredResults, r) + if i < len(scanOutputs) { + filteredOutputs = append(filteredOutputs, scanOutputs[i]) + } + } + } + // Arrange into skills vs agents. var skillResults, agentResults []*audit.Result var skillOutputs, agentOutputs []audit.ScanOutput var skillSummary, agentSummary auditRunSummary if ctx.kind == kindAgents { - agentResults, agentOutputs, agentSummary = results, scanOutputs, summary + agentResults, agentOutputs, agentSummary = filteredResults, filteredOutputs, summary skillResults, skillOutputs, skillSummary = otherResults, otherOutputs, otherSummary } else { - skillResults, skillOutputs, skillSummary = results, scanOutputs, summary + skillResults, skillOutputs, skillSummary = filteredResults, filteredOutputs, summary agentResults, agentOutputs, agentSummary = otherResults, otherOutputs, otherSummary } From 4c0674ddc18bb5df5f20f5d594fc838f167a8aa0 Mon Sep 17 00:00:00 2001 From: Willie Date: Thu, 9 Apr 2026 15:38:02 +0800 Subject: [PATCH 145/205] fix(audit): set agentsSourcePath in project mode for TUI tab Project mode was missing agentsSourcePath assignment, causing Agents(0) in TUI even when agents exist. Also simplifies the kind-based source selection to use the existing override at line 217. --- cmd/skillshare/audit.go | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/cmd/skillshare/audit.go b/cmd/skillshare/audit.go index 64c69806..61b8630e 100644 --- a/cmd/skillshare/audit.go +++ b/cmd/skillshare/audit.go @@ -188,11 +188,8 @@ func cmdAudit(args []string) error { if err != nil { return err } - if kind == kindAgents { - sourcePath = rt.agentsSourcePath - } else { - sourcePath = rt.sourcePath - } + sourcePath = rt.sourcePath + agentsSourcePath = rt.agentsSourcePath projectRoot = cwd defaultThreshold = rt.config.Audit.BlockThreshold configProfile = rt.config.Audit.Profile From 33cbd17ddcc97d002ecf59877218da0b8872698c Mon Sep 17 00:00:00 2001 From: Willie Date: Thu, 9 Apr 2026 15:39:18 +0800 Subject: [PATCH 146/205] fix(audit): show 'No skills/agents' instead of 'No items' in TUI Set list item name per tab so empty state shows contextual message. Also align empty state text with left padding matching list title. --- cmd/skillshare/audit_tui.go | 3 +++ 1 file changed, 3 insertions(+) diff --git a/cmd/skillshare/audit_tui.go b/cmd/skillshare/audit_tui.go index 75592292..457120e1 100644 --- a/cmd/skillshare/audit_tui.go +++ b/cmd/skillshare/audit_tui.go @@ -305,6 +305,8 @@ func newAuditTUIModel( l := list.New(listItems, auditDelegate{}, 0, 0) l.Title = fmt.Sprintf("Audit results (%d scanned)", activeSummary.Scanned) l.Styles.Title = tc.ListTitle + l.Styles.NoItems = l.Styles.NoItems.PaddingLeft(2) + l.SetStatusBarItemName(initialTab.noun(), initialTab.noun()) l.SetShowStatusBar(false) l.SetFilteringEnabled(false) l.SetShowHelp(false) @@ -345,6 +347,7 @@ func (m *auditTUIModel) switchTab() { m.detailScroll = 0 m.applyFilter() m.list.Title = fmt.Sprintf("Audit results (%d scanned)", m.summary.Scanned) + m.list.SetStatusBarItemName(m.activeTab.noun(), m.activeTab.noun()) skipGroupItem(&m.list, 1) } From ff1aedfd1a337c45c875afab6c06c8b2590533e3 Mon Sep 17 00:00:00 2001 From: Willie Date: Thu, 9 Apr 2026 15:41:29 +0800 Subject: [PATCH 147/205] fix(audit): group agents by directory in TUI auditRepoKey now groups any nested path (e.g. demo/code-reviewer.md) by its first segment, not just tracked repo prefixes starting with _. This gives agents the same directory grouping as the Resources page. --- cmd/skillshare/audit_tui.go | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/cmd/skillshare/audit_tui.go b/cmd/skillshare/audit_tui.go index 457120e1..fa2e861a 100644 --- a/cmd/skillshare/audit_tui.go +++ b/cmd/skillshare/audit_tui.go @@ -84,10 +84,13 @@ func compactAuditPath(name string) string { return strings.Join(segments, "/") } -// auditRepoKey extracts the grouping key from a skill name. +// auditRepoKey extracts the grouping key from a skill/agent name. +// For tracked repos: "_repo-name/skill" → "_repo-name" +// For nested agents: "demo/code-reviewer.md" → "demo" +// For flat names: "my-skill" → "" (standalone) func auditRepoKey(name string) string { segments := strings.Split(name, "/") - if strings.HasPrefix(segments[0], "_") { + if len(segments) > 1 { return segments[0] } return "" From ed924c919356d3d446417066368a121ba1a5db97 Mon Sep 17 00:00:00 2001 From: Willie Date: Thu, 9 Apr 2026 16:02:05 +0800 Subject: [PATCH 148/205] fix(list): fix tab bar clipped off screen in split layout Panel height calculation was off by 1 (subtracted 7 but actual overhead is 8 lines), causing the tab bar to be pushed above the visible terminal area. --- cmd/skillshare/list_tui.go | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/cmd/skillshare/list_tui.go b/cmd/skillshare/list_tui.go index 918c0b9b..5e8ac9e0 100644 --- a/cmd/skillshare/list_tui.go +++ b/cmd/skillshare/list_tui.go @@ -635,7 +635,8 @@ func (m listTUIModel) renderFilterBar() string { func (m *listTUIModel) syncListSize() { if listSplitActive(m.termWidth) { - panelHeight := m.termHeight - 7 // -2 for tab bar + // tab(1) + gap(1) + panel + gap(1) + filter(1) + summary(1) + gap(1) + help(1) + trail(1) = 8 overhead + panelHeight := m.termHeight - 8 if panelHeight < 6 { panelHeight = 6 } @@ -643,7 +644,7 @@ func (m *listTUIModel) syncListSize() { return } - listHeight := m.termHeight - 22 // -2 for tab bar + listHeight := m.termHeight - 22 if listHeight < 6 { listHeight = 6 } @@ -690,7 +691,7 @@ func (m listTUIModel) viewSplit() string { b.WriteString(m.renderTabBar()) b.WriteString("\n\n") - panelHeight := m.termHeight - 7 // -2 for tab bar + panelHeight := m.termHeight - 8 if panelHeight < 6 { panelHeight = 6 } From f0f371c60444a49f2728cb8a55eed72be9ad14e3 Mon Sep 17 00:00:00 2001 From: Willie Date: Thu, 9 Apr 2026 16:07:07 +0800 Subject: [PATCH 149/205] fix(list): group agents by directory in TUI Set RepoName from parent directory for nested agents so buildGroupedItems creates proper group headers (e.g. demo (27)) instead of putting all agents under standalone. --- cmd/skillshare/list.go | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/cmd/skillshare/list.go b/cmd/skillshare/list.go index 79bf6c8c..2c082bcf 100644 --- a/cmd/skillshare/list.go +++ b/cmd/skillshare/list.go @@ -282,6 +282,12 @@ func discoverAndBuildAgentEntries(agentsSource string) []skillEntry { IsNested: d.IsNested, Disabled: d.Disabled, } + // Group agents by parent directory (like tracked repos for skills). + if d.IsNested { + if dir := filepath.Dir(d.RelPath); dir != "." { + entries[i].RepoName = dir + } + } key := strings.TrimSuffix(d.RelPath, ".md") if entry := store.GetByPath(key); entry != nil { entries[i].Source = entry.Source From aaabc3ad889d1cb4427c7d347c03863ae0b77bb7 Mon Sep 17 00:00:00 2001 From: Willie Date: Thu, 9 Apr 2026 16:23:01 +0800 Subject: [PATCH 150/205] =?UTF-8?q?refactor:=20simplify=20audit=20code=20?= =?UTF-8?q?=E2=80=94=20constants,=20dedup=20helpers,=20cache=20pattern?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Add audit.CrossSkillResultName constant, use in Go + TS - Merge toAuditInputs/toAgentAuditInputs into one with isFile param - Extract discoverForKind() and toInputsForKind() to eliminate 5+ if-else branches - Simplify launchAuditTUIWithTabs with otherKindFilter dispatch - Use CROSS_SKILL_NAME constant in AuditPage.tsx --- cmd/skillshare/audit.go | 41 ++++++++++++++-------------------- cmd/skillshare/audit_render.go | 23 +++++-------------- internal/audit/crossskill.go | 5 ++++- ui/src/pages/AuditPage.tsx | 4 +++- 4 files changed, 30 insertions(+), 43 deletions(-) diff --git a/cmd/skillshare/audit.go b/cmd/skillshare/audit.go index 61b8630e..efabdd17 100644 --- a/cmd/skillshare/audit.go +++ b/cmd/skillshare/audit.go @@ -477,18 +477,10 @@ func scanSkillPath(skillPath, projectRoot string, registry *audit.Registry) (*au return audit.ScanSkill(skillPath) } -func toAuditInputs(skills []auditSkillRef) []audit.SkillInput { - inputs := make([]audit.SkillInput, len(skills)) - for i, s := range skills { - inputs[i] = audit.SkillInput{Name: s.name, Path: s.path} - } - return inputs -} - -func toAgentAuditInputs(agents []auditSkillRef) []audit.SkillInput { - inputs := make([]audit.SkillInput, len(agents)) - for i, a := range agents { - inputs[i] = audit.SkillInput{Name: a.name, Path: a.path, IsFile: true} +func toAuditInputs(items []auditSkillRef, isFile bool) []audit.SkillInput { + inputs := make([]audit.SkillInput, len(items)) + for i, item := range items { + inputs[i] = audit.SkillInput{Name: item.name, Path: item.path, IsFile: isFile} } return inputs } @@ -512,6 +504,17 @@ func collectInstalledAgentPaths(agentsSourcePath string) ([]auditSkillRef, error return agentPaths, nil } +func discoverForKind(kind resourceKindFilter, sourcePath string) ([]auditSkillRef, error) { + if kind == kindAgents { + return collectInstalledAgentPaths(sourcePath) + } + return collectInstalledSkillPaths(sourcePath) +} + +func toInputsForKind(kind resourceKindFilter, items []auditSkillRef) []audit.SkillInput { + return toAuditInputs(items, kind == kindAgents) +} + func scanPathTarget(targetPath, projectRoot string, registry *audit.Registry) (*audit.Result, error) { info, err := os.Stat(targetPath) if err != nil { @@ -596,12 +599,7 @@ func auditInstalled(sourcePath, agentsSourcePath, mode, projectRoot, threshold s progressBar.Increment() } } - var scanInputs []audit.SkillInput - if kind == kindAgents { - scanInputs = toAgentAuditInputs(skillPaths) - } else { - scanInputs = toAuditInputs(skillPaths) - } + scanInputs := toInputsForKind(kind, skillPaths) scanResults := audit.ParallelScan(scanInputs, projectRoot, onDone, reg) if progressBar != nil { progressBar.Stop() @@ -755,12 +753,7 @@ func auditFiltered(sourcePath, agentsSourcePath string, names, groups []string, progressBar.Increment() } } - var scanInputs []audit.SkillInput - if kind == kindAgents { - scanInputs = toAgentAuditInputs(matched) - } else { - scanInputs = toAuditInputs(matched) - } + scanInputs := toInputsForKind(kind, matched) scanResults := audit.ParallelScan(scanInputs, projectRoot, onDone, reg) if progressBar != nil { progressBar.Stop() diff --git a/cmd/skillshare/audit_render.go b/cmd/skillshare/audit_render.go index 1ec224ed..de072be6 100644 --- a/cmd/skillshare/audit_render.go +++ b/cmd/skillshare/audit_render.go @@ -77,27 +77,16 @@ func launchAuditTUIWithTabs(results []*audit.Result, scanOutputs []audit.ScanOut var otherOutputs []audit.ScanOutput var otherSummary auditRunSummary + otherKindFilter := kindAgents otherSource := ctx.agentsSourcePath - otherKind := "agent" if ctx.kind == kindAgents { + otherKindFilter = kindSkills otherSource = ctx.sourcePath - otherKind = "skill" } if otherSource != "" { - var otherPaths []auditSkillRef - var err error - if otherKind == "agent" { - otherPaths, err = collectInstalledAgentPaths(otherSource) - } else { - otherPaths, err = collectInstalledSkillPaths(otherSource) - } - var otherInputs []audit.SkillInput - if otherKind == "agent" { - otherInputs = toAgentAuditInputs(otherPaths) - } else { - otherInputs = toAuditInputs(otherPaths) - } + otherPaths, err := discoverForKind(otherKindFilter, otherSource) + otherInputs := toInputsForKind(otherKindFilter, otherPaths) if err == nil && len(otherPaths) > 0 { otherScanResults := audit.ParallelScan(otherInputs, ctx.projectRoot, nil, ctx.registry) for i := range otherPaths { @@ -106,7 +95,7 @@ func launchAuditTUIWithTabs(results []*audit.Result, scanOutputs []audit.ScanOut if sr.Err == nil { sr.Result.Threshold = ctx.threshold sr.Result.IsBlocked = sr.Result.HasSeverityAtOrAbove(ctx.threshold) - sr.Result.Kind = otherKind + sr.Result.Kind = otherKindFilter.SingularNoun() if rel, relErr := filepath.Rel(otherSource, sr.Result.ScanTarget); relErr == nil { sr.Result.SkillName = rel } @@ -124,7 +113,7 @@ func launchAuditTUIWithTabs(results []*audit.Result, scanOutputs []audit.ScanOut var filteredResults []*audit.Result var filteredOutputs []audit.ScanOutput for i, r := range results { - if r.SkillName != "_cross-skill" { + if r.SkillName != audit.CrossSkillResultName { filteredResults = append(filteredResults, r) if i < len(scanOutputs) { filteredOutputs = append(filteredOutputs, scanOutputs[i]) diff --git a/internal/audit/crossskill.go b/internal/audit/crossskill.go index 938d70d4..a8ddb59e 100644 --- a/internal/audit/crossskill.go +++ b/internal/audit/crossskill.go @@ -6,6 +6,9 @@ import ( "strings" ) +// CrossSkillResultName is the synthetic skill name used for cross-skill analysis results. +const CrossSkillResultName = "_cross-skill" + // skillCapability summarises the security-relevant capabilities of a single skill, // derived entirely from its existing Result (TierProfile + Findings). type skillCapability struct { @@ -134,7 +137,7 @@ func CrossSkillAnalysis(results []*Result) *Result { } r := &Result{ - SkillName: "_cross-skill", + SkillName: CrossSkillResultName, Findings: findings, Analyzability: 1.0, } diff --git a/ui/src/pages/AuditPage.tsx b/ui/src/pages/AuditPage.tsx index 4e207f95..b4eb2002 100644 --- a/ui/src/pages/AuditPage.tsx +++ b/ui/src/pages/AuditPage.tsx @@ -35,6 +35,8 @@ import { queryKeys, staleTimes } from '../lib/queryKeys'; type SeverityFilter = 'CRITICAL' | 'HIGH' | 'MEDIUM' | 'LOW' | 'INFO'; type AuditKind = 'skills' | 'agents'; +const CROSS_SKILL_NAME = '_cross-skill'; + const severityFilterOptions: { value: SeverityFilter; label: string }[] = [ { value: 'INFO', label: 'All (INFO+)' }, { value: 'LOW', label: 'LOW+' }, @@ -79,7 +81,7 @@ export default function AuditPage() { // Cross-skill analysis is a derived insight, not an actual scanned resource. const realResults = useMemo(() => { if (!data) return []; - return data.results.filter((r) => r.skillName !== '_cross-skill'); + return data.results.filter((r) => r.skillName !== CROSS_SKILL_NAME); }, [data]); const totalFindings = useMemo(() => { From 335092066fb80116dc9efc23ebab1557233cc539 Mon Sep 17 00:00:00 2001 From: Willie Date: Thu, 9 Apr 2026 23:34:21 +0800 Subject: [PATCH 151/205] feat(server): add computeAgentTargetDiff for agent diff in UI Port agent diff logic from CLI (diff_agents.go) to server package. Returns []diffItem with Kind='agent' for link/update/prune/local actions. --- internal/server/handler_agent_diff.go | 98 ++++++++++++++++++++++ internal/server/handler_agent_diff_test.go | 75 +++++++++++++++++ 2 files changed, 173 insertions(+) create mode 100644 internal/server/handler_agent_diff.go create mode 100644 internal/server/handler_agent_diff_test.go diff --git a/internal/server/handler_agent_diff.go b/internal/server/handler_agent_diff.go new file mode 100644 index 00000000..de2d1322 --- /dev/null +++ b/internal/server/handler_agent_diff.go @@ -0,0 +1,98 @@ +package server + +import ( + "os" + "path/filepath" + "strings" + + "skillshare/internal/resource" + "skillshare/internal/utils" +) + +// computeAgentTargetDiff computes diff items for agents in a single target directory. +// Returns items with Kind="agent" for each pending action (link, update, prune, local). +func computeAgentTargetDiff(targetDir string, agents []resource.DiscoveredResource) []diffItem { + var items []diffItem + + // Build expected set + expected := make(map[string]resource.DiscoveredResource, len(agents)) + for _, a := range agents { + expected[a.FlatName] = a + } + + // Read existing .md files in target + existing := make(map[string]os.FileMode) + if entries, err := os.ReadDir(targetDir); err == nil { + for _, e := range entries { + if e.IsDir() || !strings.HasSuffix(strings.ToLower(e.Name()), ".md") { + continue + } + if resource.ConventionalExcludes[e.Name()] { + continue + } + existing[e.Name()] = e.Type() + } + } + + // Missing agents → link + for flatName, agent := range expected { + if _, ok := existing[flatName]; !ok { + items = append(items, diffItem{ + Skill: flatName, + Action: "link", + Reason: "source only", + Kind: "agent", + }) + continue + } + // Exists — check if symlink points to correct source + targetPath := filepath.Join(targetDir, flatName) + if utils.IsSymlinkOrJunction(targetPath) { + absLink, err := utils.ResolveLinkTarget(targetPath) + if err != nil { + items = append(items, diffItem{ + Skill: flatName, + Action: "update", + Reason: "link target unreadable", + Kind: "agent", + }) + continue + } + absSource, _ := filepath.Abs(agent.AbsPath) + if !utils.PathsEqual(absLink, absSource) { + items = append(items, diffItem{ + Skill: flatName, + Action: "update", + Reason: "symlink points elsewhere", + Kind: "agent", + }) + } + // else: in sync, no item emitted + } + // Non-symlink existing file: already local, no action needed for expected agents + } + + // Orphan/local detection + for name, fileType := range existing { + if _, ok := expected[name]; ok { + continue + } + if fileType&os.ModeSymlink != 0 { + items = append(items, diffItem{ + Skill: name, + Action: "prune", + Reason: "orphan symlink", + Kind: "agent", + }) + } else { + items = append(items, diffItem{ + Skill: name, + Action: "local", + Reason: "local file", + Kind: "agent", + }) + } + } + + return items +} diff --git a/internal/server/handler_agent_diff_test.go b/internal/server/handler_agent_diff_test.go new file mode 100644 index 00000000..3deefd9a --- /dev/null +++ b/internal/server/handler_agent_diff_test.go @@ -0,0 +1,75 @@ +package server + +import ( + "os" + "path/filepath" + "testing" + + "skillshare/internal/resource" +) + +func TestComputeAgentTargetDiff_MissingInTarget(t *testing.T) { + targetDir := t.TempDir() + + agents := []resource.DiscoveredResource{ + {FlatName: "helper.md", AbsPath: "/src/helper.md", RelPath: "helper.md"}, + } + + items := computeAgentTargetDiff(targetDir, agents) + + if len(items) != 1 { + t.Fatalf("expected 1 item, got %d", len(items)) + } + if items[0].Action != "link" { + t.Errorf("expected action 'link', got %q", items[0].Action) + } + if items[0].Kind != "agent" { + t.Errorf("expected kind 'agent', got %q", items[0].Kind) + } +} + +func TestComputeAgentTargetDiff_OrphanSymlink(t *testing.T) { + targetDir := t.TempDir() + os.Symlink("/nonexistent/old.md", filepath.Join(targetDir, "orphan.md")) + + items := computeAgentTargetDiff(targetDir, nil) + + if len(items) != 1 { + t.Fatalf("expected 1 item, got %d", len(items)) + } + if items[0].Action != "prune" { + t.Errorf("expected action 'prune', got %q", items[0].Action) + } +} + +func TestComputeAgentTargetDiff_LocalFile(t *testing.T) { + targetDir := t.TempDir() + os.WriteFile(filepath.Join(targetDir, "local.md"), []byte("# Local"), 0644) + + items := computeAgentTargetDiff(targetDir, nil) + + if len(items) != 1 { + t.Fatalf("expected 1 item, got %d", len(items)) + } + if items[0].Action != "local" { + t.Errorf("expected action 'local', got %q", items[0].Action) + } +} + +func TestComputeAgentTargetDiff_InSync(t *testing.T) { + sourceDir := t.TempDir() + targetDir := t.TempDir() + srcFile := filepath.Join(sourceDir, "agent.md") + os.WriteFile(srcFile, []byte("# Agent"), 0644) + os.Symlink(srcFile, filepath.Join(targetDir, "agent.md")) + + agents := []resource.DiscoveredResource{ + {FlatName: "agent.md", AbsPath: srcFile, RelPath: "agent.md"}, + } + + items := computeAgentTargetDiff(targetDir, agents) + + if len(items) != 0 { + t.Fatalf("expected 0 items (in sync), got %d", len(items)) + } +} From 55e2e0e5b3ba3bcaf61a01767384493f22c6c341 Mon Sep 17 00:00:00 2001 From: Willie Date: Thu, 9 Apr 2026 23:38:37 +0800 Subject: [PATCH 152/205] feat(server): include agent diffs in handleDiff and handleDiffStream - Discover agents from agentsSource() and compute per-target diffs - Merge agent diff items into existing target results - Set Kind='skill' on all existing skill diff items - Set Kind='agent' on all agent diff items - Targets without agentPath are skipped (matches CLI behavior) --- internal/server/handler_diff_stream.go | 99 ++++++++++++++++++++------ internal/server/handler_sync.go | 57 +++++++++++++++ 2 files changed, 136 insertions(+), 20 deletions(-) diff --git a/internal/server/handler_diff_stream.go b/internal/server/handler_diff_stream.go index e734de3f..12f8c38a 100644 --- a/internal/server/handler_diff_stream.go +++ b/internal/server/handler_diff_stream.go @@ -7,6 +7,7 @@ import ( "path/filepath" "skillshare/internal/config" + "skillshare/internal/resource" ssync "skillshare/internal/sync" "skillshare/internal/utils" ) @@ -66,6 +67,64 @@ func (s *Server) handleDiffStream(w http.ResponseWriter, r *http.Request) { }) } + // Agent diffs — discover agents and compute per-target diffs + agentsSource := s.agentsSource() + var agents []resource.DiscoveredResource + if agentsSource != "" { + discovered, _ := resource.AgentKind{}.Discover(agentsSource) + agents = resource.ActiveAgents(discovered) + } + + if len(agents) > 0 { + var builtinAgents map[string]config.TargetConfig + if s.IsProjectMode() { + builtinAgents = config.ProjectAgentTargets() + } else { + builtinAgents = config.DefaultAgentTargets() + } + + for name, target := range targets { + select { + case <-ctx.Done(): + return + default: + } + + ac := target.AgentsConfig() + agentPath := ac.Path + if agentPath == "" { + if builtin, ok := builtinAgents[name]; ok { + agentPath = builtin.Path + } + } + if agentPath == "" { + continue + } + agentPath = config.ExpandPath(agentPath) + + agentItems := computeAgentTargetDiff(agentPath, agents) + if len(agentItems) == 0 { + continue + } + + // Merge into existing diff for this target + merged := false + for i := range diffs { + if diffs[i].Target == name { + diffs[i].Items = append(diffs[i].Items, agentItems...) + merged = true + break + } + } + if !merged { + diffs = append(diffs, diffTarget{ + Target: name, + Items: agentItems, + }) + } + } + } + donePayload := map[string]any{"diffs": diffs} maps.Copy(donePayload, ignorePayload(ignoreStats)) safeSend("done", donePayload) @@ -85,7 +144,7 @@ func (s *Server) computeTargetDiff(name string, target config.TargetConfig, disc if mode == "symlink" { status := ssync.CheckStatus(sc.Path, source) if status != ssync.StatusLinked { - dt.Items = append(dt.Items, diffItem{Skill: "(entire directory)", Action: "link", Reason: "source only"}) + dt.Items = append(dt.Items, diffItem{Skill: "(entire directory)", Action: "link", Reason: "source only", Kind: "skill"}) } return dt } @@ -100,7 +159,7 @@ func (s *Server) computeTargetDiff(name string, target config.TargetConfig, disc TargetNaming: sc.TargetNaming, }, filtered) if err != nil { - dt.Items = append(dt.Items, diffItem{Skill: "(target naming)", Action: "skip", Reason: err.Error()}) + dt.Items = append(dt.Items, diffItem{Skill: "(target naming)", Action: "skip", Reason: err.Error(), Kind: "skill"}) return dt } // Surface collision/validation stats so the UI can show why skills were skipped @@ -118,23 +177,23 @@ func (s *Server) computeTargetDiff(name string, target config.TargetConfig, disc if !isManaged { if info, statErr := os.Stat(targetSkillPath); statErr == nil { if info.IsDir() { - dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "skip", Reason: "local copy (sync --force to replace)"}) + dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "skip", Reason: "local copy (sync --force to replace)", Kind: "skill"}) } else { - dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "update", Reason: "target entry is not a directory"}) + dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "update", Reason: "target entry is not a directory", Kind: "skill"}) } } else if os.IsNotExist(statErr) { - dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "link", Reason: "source only"}) + dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "link", Reason: "source only", Kind: "skill"}) } else { - dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "update", Reason: "cannot access target entry"}) + dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "update", Reason: "cannot access target entry", Kind: "skill"}) } } else { targetInfo, statErr := os.Stat(targetSkillPath) if os.IsNotExist(statErr) { - dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "link", Reason: "missing (deleted from target)"}) + dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "link", Reason: "missing (deleted from target)", Kind: "skill"}) } else if statErr != nil { - dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "update", Reason: "cannot access target entry"}) + dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "update", Reason: "cannot access target entry", Kind: "skill"}) } else if !targetInfo.IsDir() { - dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "update", Reason: "target entry is not a directory"}) + dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "update", Reason: "target entry is not a directory", Kind: "skill"}) } else { oldMtime := manifest.Mtimes[resolved.TargetName] currentMtime, mtimeErr := ssync.DirMaxMtime(skill.SourcePath) @@ -143,9 +202,9 @@ func (s *Server) computeTargetDiff(name string, target config.TargetConfig, disc } srcChecksum, checksumErr := ssync.DirChecksum(skill.SourcePath) if checksumErr != nil { - dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "update", Reason: "cannot compute checksum"}) + dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "update", Reason: "cannot compute checksum", Kind: "skill"}) } else if srcChecksum != oldChecksum { - dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "update", Reason: "content changed"}) + dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "update", Reason: "content changed", Kind: "skill"}) } } } @@ -155,7 +214,7 @@ func (s *Server) computeTargetDiff(name string, target config.TargetConfig, disc continue } if !validNames[managedName] { - dt.Items = append(dt.Items, diffItem{Skill: managedName, Action: "prune", Reason: "orphan copy"}) + dt.Items = append(dt.Items, diffItem{Skill: managedName, Action: "prune", Reason: "orphan copy", Kind: "skill"}) } } return dt @@ -168,7 +227,7 @@ func (s *Server) computeTargetDiff(name string, target config.TargetConfig, disc _, err := os.Lstat(targetSkillPath) if err != nil { if os.IsNotExist(err) { - dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "link", Reason: "source only"}) + dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "link", Reason: "source only", Kind: "skill"}) } continue } @@ -176,15 +235,15 @@ func (s *Server) computeTargetDiff(name string, target config.TargetConfig, disc if utils.IsSymlinkOrJunction(targetSkillPath) { absLink, linkErr := utils.ResolveLinkTarget(targetSkillPath) if linkErr != nil { - dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "update", Reason: "link target unreadable"}) + dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "update", Reason: "link target unreadable", Kind: "skill"}) continue } absSource, _ := filepath.Abs(skill.SourcePath) if !utils.PathsEqual(absLink, absSource) { - dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "update", Reason: "symlink points elsewhere"}) + dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "update", Reason: "symlink points elsewhere", Kind: "skill"}) } } else { - dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "skip", Reason: "local copy (sync --force to replace)"}) + dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "skip", Reason: "local copy (sync --force to replace)", Kind: "skill"}) } } @@ -212,16 +271,16 @@ func (s *Server) computeTargetDiff(name string, target config.TargetConfig, disc } absSource, _ := filepath.Abs(source) if utils.PathHasPrefix(absLink, absSource+string(filepath.Separator)) { - dt.Items = append(dt.Items, diffItem{Skill: eName, Action: "prune", Reason: "orphan symlink"}) + dt.Items = append(dt.Items, diffItem{Skill: eName, Action: "prune", Reason: "orphan symlink", Kind: "skill"}) } } else if info.IsDir() { if _, inManifest := manifest.Managed[eName]; inManifest { - dt.Items = append(dt.Items, diffItem{Skill: eName, Action: "prune", Reason: "orphan managed directory (manifest)"}) + dt.Items = append(dt.Items, diffItem{Skill: eName, Action: "prune", Reason: "orphan managed directory (manifest)", Kind: "skill"}) } else { if resolution.Naming == "flat" && (utils.HasNestedSeparator(eName) || utils.IsTrackedRepoDir(eName)) { - dt.Items = append(dt.Items, diffItem{Skill: eName, Action: "prune", Reason: "orphan managed directory"}) + dt.Items = append(dt.Items, diffItem{Skill: eName, Action: "prune", Reason: "orphan managed directory", Kind: "skill"}) } else { - dt.Items = append(dt.Items, diffItem{Skill: eName, Action: "local", Reason: "local only"}) + dt.Items = append(dt.Items, diffItem{Skill: eName, Action: "local", Reason: "local only", Kind: "skill"}) } } } diff --git a/internal/server/handler_sync.go b/internal/server/handler_sync.go index 008e755d..26282acc 100644 --- a/internal/server/handler_sync.go +++ b/internal/server/handler_sync.go @@ -9,6 +9,7 @@ import ( "time" "skillshare/internal/config" + "skillshare/internal/resource" "skillshare/internal/skillignore" ssync "skillshare/internal/sync" ) @@ -211,6 +212,7 @@ func (s *Server) handleDiff(w http.ResponseWriter, r *http.Request) { // Snapshot config under RLock, then release before slow I/O. s.mu.RLock() source := s.cfg.Source + agentsSource := s.agentsSource() globalMode := s.cfg.Mode targets := s.cloneTargets() s.mu.RUnlock() @@ -235,6 +237,61 @@ func (s *Server) handleDiff(w http.ResponseWriter, r *http.Request) { diffs = append(diffs, s.computeTargetDiff(name, target, discovered, globalMode, source)) } + // Agent diffs — discover agents and compute per-target diffs + var agents []resource.DiscoveredResource + if agentsSource != "" { + discovered, _ := resource.AgentKind{}.Discover(agentsSource) + agents = resource.ActiveAgents(discovered) + } + + if len(agents) > 0 { + var builtinAgents map[string]config.TargetConfig + if s.IsProjectMode() { + builtinAgents = config.ProjectAgentTargets() + } else { + builtinAgents = config.DefaultAgentTargets() + } + + for name, target := range targets { + if filterTarget != "" && filterTarget != name { + continue + } + + ac := target.AgentsConfig() + agentPath := ac.Path + if agentPath == "" { + if builtin, ok := builtinAgents[name]; ok { + agentPath = builtin.Path + } + } + if agentPath == "" { + continue + } + agentPath = config.ExpandPath(agentPath) + + agentItems := computeAgentTargetDiff(agentPath, agents) + if len(agentItems) == 0 { + continue + } + + // Merge into existing diff for this target + merged := false + for i := range diffs { + if diffs[i].Target == name { + diffs[i].Items = append(diffs[i].Items, agentItems...) + merged = true + break + } + } + if !merged { + diffs = append(diffs, diffTarget{ + Target: name, + Items: agentItems, + }) + } + } + } + resp := map[string]any{"diffs": diffs} maps.Copy(resp, ignorePayload(ignoreStats)) writeJSON(w, resp) From d1ac3dd649e86a9d7ad57f3721840ad1af284c7f Mon Sep 17 00:00:00 2001 From: Willie Date: Thu, 9 Apr 2026 23:41:16 +0800 Subject: [PATCH 153/205] feat(server): support kind param in POST /api/sync Accept 'kind' field in sync request body: - 'skill': sync skills only (current behavior) - 'agent': sync agents only via SyncAgents() - '' (empty): sync both skills and agents Agent sync uses existing SyncAgents/PruneOrphanAgentLinks/PruneOrphanAgentCopies. Remove agent-exists-but-no-target warning (now obsolete). --- internal/server/handler_sync.go | 265 ++++++++++++++++++++------------ 1 file changed, 170 insertions(+), 95 deletions(-) diff --git a/internal/server/handler_sync.go b/internal/server/handler_sync.go index 26282acc..b47c9837 100644 --- a/internal/server/handler_sync.go +++ b/internal/server/handler_sync.go @@ -4,8 +4,6 @@ import ( "encoding/json" "maps" "net/http" - "os" - "strings" "time" "skillshare/internal/config" @@ -51,11 +49,12 @@ func (s *Server) handleSync(w http.ResponseWriter, r *http.Request) { defer s.mu.Unlock() var body struct { - DryRun bool `json:"dryRun"` - Force bool `json:"force"` + DryRun bool `json:"dryRun"` + Force bool `json:"force"` + Kind string `json:"kind"` } if err := json.NewDecoder(r.Body).Decode(&body); err != nil { - // Default to non-dry-run, non-force + // Default to non-dry-run, non-force, empty kind (both) } globalMode := s.cfg.Mode @@ -70,97 +69,186 @@ func (s *Server) handleSync(w http.ResponseWriter, r *http.Request) { return } - // Discover skills once for all targets - allSkills, ignoreStats, err := ssync.DiscoverSourceSkillsWithStats(s.cfg.Source) - if err != nil { - writeError(w, http.StatusInternalServerError, "failed to discover skills: "+err.Error()) - return - } - - if len(allSkills) == 0 { - warnings = append(warnings, "source directory is empty (0 skills)") - } - - // Registry entries are managed by install/uninstall, not sync. - // Sync only manages symlinks — it must not prune registry entries - // for installed skills whose files may be missing from disk. - results := make([]syncTargetResult, 0) - for name, target := range s.cfg.Targets { - sc := target.SkillsConfig() - mode := sc.Mode - if mode == "" { - mode = globalMode - } + var ignoreStats *skillignore.IgnoreStats - res := syncTargetResult{ - Target: name, - Linked: make([]string, 0), - Updated: make([]string, 0), - Skipped: make([]string, 0), - Pruned: make([]string, 0), + // Skill sync (skip when kind == "agent") + if body.Kind != "agent" { + var allSkills []ssync.DiscoveredSkill + var err error + allSkills, ignoreStats, err = ssync.DiscoverSourceSkillsWithStats(s.cfg.Source) + if err != nil { + writeError(w, http.StatusInternalServerError, "failed to discover skills: "+err.Error()) + return } - syncErrArgs := map[string]any{ - "targets_total": len(s.cfg.Targets), - "targets_failed": 1, - "target": name, - "dry_run": body.DryRun, - "force": body.Force, - "scope": "ui", + if len(allSkills) == 0 { + warnings = append(warnings, "source directory is empty (0 skills)") } - switch mode { - case "merge": - mergeResult, err := ssync.SyncTargetMergeWithSkills(name, target, allSkills, s.cfg.Source, body.DryRun, body.Force, s.projectRoot) - if err != nil { - s.writeOpsLog("sync", "error", start, syncErrArgs, err.Error()) - writeError(w, http.StatusInternalServerError, "sync failed for "+name+": "+err.Error()) - return - } - res.Linked = mergeResult.Linked - res.Updated = mergeResult.Updated - res.Skipped = mergeResult.Skipped - res.DirCreated = mergeResult.DirCreated - - pruneResult, err := ssync.PruneOrphanLinksWithSkills(ssync.PruneOptions{ - TargetPath: sc.Path, SourcePath: s.cfg.Source, Skills: allSkills, - Include: sc.Include, Exclude: sc.Exclude, TargetNaming: sc.TargetNaming, TargetName: name, - DryRun: body.DryRun, Force: body.Force, - }) - if err == nil { - res.Pruned = pruneResult.Removed + // Registry entries are managed by install/uninstall, not sync. + // Sync only manages symlinks — it must not prune registry entries + // for installed skills whose files may be missing from disk. + + for name, target := range s.cfg.Targets { + sc := target.SkillsConfig() + mode := sc.Mode + if mode == "" { + mode = globalMode } - case "copy": - copyResult, err := ssync.SyncTargetCopyWithSkills(name, target, allSkills, s.cfg.Source, body.DryRun, body.Force, nil) - if err != nil { - s.writeOpsLog("sync", "error", start, syncErrArgs, err.Error()) - writeError(w, http.StatusInternalServerError, "sync failed for "+name+": "+err.Error()) - return + res := syncTargetResult{ + Target: name, + Linked: make([]string, 0), + Updated: make([]string, 0), + Skipped: make([]string, 0), + Pruned: make([]string, 0), } - res.Linked = copyResult.Copied - res.Updated = copyResult.Updated - res.Skipped = copyResult.Skipped - res.DirCreated = copyResult.DirCreated - - pruneResult, err := ssync.PruneOrphanCopiesWithSkills(sc.Path, allSkills, sc.Include, sc.Exclude, name, sc.TargetNaming, body.DryRun) - if err == nil { - res.Pruned = pruneResult.Removed + + syncErrArgs := map[string]any{ + "targets_total": len(s.cfg.Targets), + "targets_failed": 1, + "target": name, + "dry_run": body.DryRun, + "force": body.Force, + "scope": "ui", } - default: - err := ssync.SyncTarget(name, target, s.cfg.Source, body.DryRun, s.projectRoot) - if err != nil { - s.writeOpsLog("sync", "error", start, syncErrArgs, err.Error()) - writeError(w, http.StatusInternalServerError, "sync failed for "+name+": "+err.Error()) - return + switch mode { + case "merge": + mergeResult, err := ssync.SyncTargetMergeWithSkills(name, target, allSkills, s.cfg.Source, body.DryRun, body.Force, s.projectRoot) + if err != nil { + s.writeOpsLog("sync", "error", start, syncErrArgs, err.Error()) + writeError(w, http.StatusInternalServerError, "sync failed for "+name+": "+err.Error()) + return + } + res.Linked = mergeResult.Linked + res.Updated = mergeResult.Updated + res.Skipped = mergeResult.Skipped + res.DirCreated = mergeResult.DirCreated + + pruneResult, err := ssync.PruneOrphanLinksWithSkills(ssync.PruneOptions{ + TargetPath: sc.Path, SourcePath: s.cfg.Source, Skills: allSkills, + Include: sc.Include, Exclude: sc.Exclude, TargetNaming: sc.TargetNaming, TargetName: name, + DryRun: body.DryRun, Force: body.Force, + }) + if err == nil { + res.Pruned = pruneResult.Removed + } + + case "copy": + copyResult, err := ssync.SyncTargetCopyWithSkills(name, target, allSkills, s.cfg.Source, body.DryRun, body.Force, nil) + if err != nil { + s.writeOpsLog("sync", "error", start, syncErrArgs, err.Error()) + writeError(w, http.StatusInternalServerError, "sync failed for "+name+": "+err.Error()) + return + } + res.Linked = copyResult.Copied + res.Updated = copyResult.Updated + res.Skipped = copyResult.Skipped + res.DirCreated = copyResult.DirCreated + + pruneResult, err := ssync.PruneOrphanCopiesWithSkills(sc.Path, allSkills, sc.Include, sc.Exclude, name, sc.TargetNaming, body.DryRun) + if err == nil { + res.Pruned = pruneResult.Removed + } + + default: + err := ssync.SyncTarget(name, target, s.cfg.Source, body.DryRun, s.projectRoot) + if err != nil { + s.writeOpsLog("sync", "error", start, syncErrArgs, err.Error()) + writeError(w, http.StatusInternalServerError, "sync failed for "+name+": "+err.Error()) + return + } + res.Linked = []string{"(symlink mode)"} } - res.Linked = []string{"(symlink mode)"} + + results = append(results, res) } + } + + // Agent sync (skip when kind == "skill") + if body.Kind != "skill" { + agentsSource := s.agentsSource() + if agentsSource != "" { + agentDiscovered, _ := resource.AgentKind{}.Discover(agentsSource) + agents := resource.ActiveAgents(agentDiscovered) + + if len(agents) > 0 { + var builtinAgents map[string]config.TargetConfig + if s.IsProjectMode() { + builtinAgents = config.ProjectAgentTargets() + } else { + builtinAgents = config.DefaultAgentTargets() + } - results = append(results, res) + for name, target := range s.cfg.Targets { + ac := target.AgentsConfig() + agentPath := ac.Path + if agentPath == "" { + if builtin, ok := builtinAgents[name]; ok { + agentPath = builtin.Path + } + } + if agentPath == "" { + continue + } + agentPath = config.ExpandPath(agentPath) + + agentMode := ac.Mode + if agentMode == "" { + agentMode = "merge" + } + + agentResult, err := ssync.SyncAgents(agents, agentsSource, agentPath, agentMode, body.DryRun, body.Force) + if err != nil { + warnings = append(warnings, "agent sync failed for "+name+": "+err.Error()) + continue + } + + // Merge into existing result or create new + merged := false + for i := range results { + if results[i].Target == name { + results[i].Linked = append(results[i].Linked, agentResult.Linked...) + results[i].Updated = append(results[i].Updated, agentResult.Updated...) + results[i].Skipped = append(results[i].Skipped, agentResult.Skipped...) + merged = true + break + } + } + if !merged && (len(agentResult.Linked) > 0 || len(agentResult.Updated) > 0 || len(agentResult.Skipped) > 0) { + results = append(results, syncTargetResult{ + Target: name, + Linked: agentResult.Linked, + Updated: agentResult.Updated, + Skipped: agentResult.Skipped, + Pruned: make([]string, 0), + }) + } + + // Prune orphan agents + if agentMode == "merge" { + pruned, _ := ssync.PruneOrphanAgentLinks(agentPath, agents, body.DryRun) + for i := range results { + if results[i].Target == name { + results[i].Pruned = append(results[i].Pruned, pruned...) + break + } + } + } else if agentMode == "copy" { + pruned, _ := ssync.PruneOrphanAgentCopies(agentPath, agents, body.DryRun) + for i := range results { + if results[i].Target == name { + results[i].Pruned = append(results[i].Pruned, pruned...) + break + } + } + } + } + } + } } // Log the sync operation @@ -169,23 +257,10 @@ func (s *Server) handleSync(w http.ResponseWriter, r *http.Request) { "targets_failed": 0, "dry_run": body.DryRun, "force": body.Force, + "kind": body.Kind, "scope": "ui", }, "") - // Check if agents exist in source but no configured target supports them - agentsDir := s.agentsSource() - if agentsDir != "" && !s.cfg.HasAgentTarget() { - if entries, err := os.ReadDir(agentsDir); err == nil { - for _, e := range entries { - if !e.IsDir() && strings.HasSuffix(strings.ToLower(e.Name()), ".md") && - !strings.HasSuffix(strings.ToLower(e.Name()), ".skillshare-meta.json") { - warnings = append(warnings, "Agents exist in source but none of your configured targets support agents. Agent files will not be synced.") - break - } - } - } - } - resp := map[string]any{ "results": results, "warnings": warnings, From dfd69b7ea0aea0ad44f3bc91860216f6d9b05c43 Mon Sep 17 00:00:00 2001 From: Willie Date: Thu, 9 Apr 2026 23:42:36 +0800 Subject: [PATCH 154/205] feat(ui): add kind param to sync API client Accepts 'skill' | 'agent' to control sync scope. --- ui/src/api/client.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ui/src/api/client.ts b/ui/src/api/client.ts index 7afba776..76f361fa 100644 --- a/ui/src/api/client.ts +++ b/ui/src/api/client.ts @@ -217,7 +217,7 @@ export const api = { }), // Sync - sync: (opts: { dryRun?: boolean; force?: boolean }) => + sync: (opts: { dryRun?: boolean; force?: boolean; kind?: 'skill' | 'agent' }) => apiFetch('/sync', { method: 'POST', body: JSON.stringify(opts), From 3dfbff7f535cd31b05142df6b3b21e8ae7fe5c36 Mon Sep 17 00:00:00 2001 From: Willie Date: Thu, 9 Apr 2026 23:44:33 +0800 Subject: [PATCH 155/205] feat(ui): add sync scope control and kind-separated summary - SegmentedControl with Skills / Agents / Both options (default: Both) - Sync button text and API kind param change with selection - Diff summary shows separate skill/agent counts with KindBadge - Split button labels (Force Sync) also reflect scope --- ui/src/pages/SyncPage.tsx | 110 ++++++++++++++++++++++++-------------- 1 file changed, 71 insertions(+), 39 deletions(-) diff --git a/ui/src/pages/SyncPage.tsx b/ui/src/pages/SyncPage.tsx index e422c7ea..7b700ac9 100644 --- a/ui/src/pages/SyncPage.tsx +++ b/ui/src/pages/SyncPage.tsx @@ -28,6 +28,7 @@ import StreamProgressBar from '../components/StreamProgressBar'; import SyncResultList from '../components/SyncResultList'; import { radius, shadows } from '../design'; import KindBadge from '../components/KindBadge'; +import SegmentedControl from '../components/SegmentedControl'; function extractIgnoreSources(data: IgnoreSources): IgnoreSources { return { @@ -47,6 +48,7 @@ export default function SyncPage() { const [ignoreSources, setIgnoreSources] = useState(null); const [ignoredExpanded, setIgnoredExpanded] = useState(false); const { toast } = useToast(); + const [syncScope, setSyncScope] = useState<'skill' | 'agent' | 'both'>('both'); const toastRef = useRef(toast); useEffect(() => { toastRef.current = toast; }); @@ -95,7 +97,11 @@ export default function SyncPage() { setLastDryRun(dryRun); setSyncWarnings([]); try { - const res = await api.sync({ dryRun, force }); + const res = await api.sync({ + dryRun, + force, + ...(syncScope !== 'both' ? { kind: syncScope } : {}), + }); setResults(res.results); setSyncWarnings(res.warnings ?? []); setIgnoreSources(extractIgnoreSources(res)); @@ -116,29 +122,29 @@ export default function SyncPage() { // Derived ignored skills list const ignoredSkills = ignoreSources?.ignored_skills ?? []; - // Calculate diff summary + // Calculate diff summary by kind const diffs = diffData ?? []; - const totalActions = diffs.reduce((sum, d) => sum + (d.items?.length ?? 0), 0); - const pendingLinks = diffs.reduce( - (sum, d) => sum + (d.items?.filter((i) => i.action === 'link').length ?? 0), - 0, - ); - const pendingUpdates = diffs.reduce( - (sum, d) => sum + (d.items?.filter((i) => i.action === 'update').length ?? 0), - 0, - ); - const pendingPrunes = diffs.reduce( - (sum, d) => sum + (d.items?.filter((i) => i.action === 'prune').length ?? 0), - 0, - ); - const pendingSkips = diffs.reduce( - (sum, d) => sum + (d.items?.filter((i) => i.action === 'skip').length ?? 0), - 0, - ); - const pendingLocal = diffs.reduce( - (sum, d) => sum + (d.items?.filter((i) => i.action === 'local').length ?? 0), - 0, - ); + const allItems = diffs.flatMap((d) => d.items ?? []); + + const countByKindAction = (kind: string, action: string) => + allItems.filter((i) => (i.kind ?? 'skill') === kind && i.action === action).length; + + const skillLinks = countByKindAction('skill', 'link'); + const skillUpdates = countByKindAction('skill', 'update'); + const skillPrunes = countByKindAction('skill', 'prune'); + const skillSkips = countByKindAction('skill', 'skip'); + const skillLocal = countByKindAction('skill', 'local'); + const skillSync = skillLinks + skillUpdates + skillPrunes + skillSkips; + + const agentLinks = countByKindAction('agent', 'link'); + const agentUpdates = countByKindAction('agent', 'update'); + const agentPrunes = countByKindAction('agent', 'prune'); + const agentSkips = countByKindAction('agent', 'skip'); + const agentLocal = countByKindAction('agent', 'local'); + const agentSync = agentLinks + agentUpdates + agentPrunes + agentSkips; + + const totalActions = allItems.length; + const pendingLocal = skillLocal + agentLocal; const syncActions = totalActions - pendingLocal; return ( @@ -217,14 +223,25 @@ export default function SyncPage() { {diffLoading ? (

Checking status...

) : syncActions > 0 ? ( -
- - Pending changes: - - {pendingLinks > 0 && {pendingLinks} to link} - {pendingUpdates > 0 && {pendingUpdates} to update} - {pendingSkips > 0 && {pendingSkips} skipped} - {pendingPrunes > 0 && {pendingPrunes} to prune} +
+ {skillSync > 0 && ( +
+ + {skillLinks > 0 && {skillLinks} to link} + {skillUpdates > 0 && {skillUpdates} to update} + {skillSkips > 0 && {skillSkips} skipped} + {skillPrunes > 0 && {skillPrunes} to prune} +
+ )} + {agentSync > 0 && ( +
+ + {agentLinks > 0 && {agentLinks} to link} + {agentUpdates > 0 && {agentUpdates} to update} + {agentSkips > 0 && {agentSkips} skipped} + {agentPrunes > 0 && {agentPrunes} to prune} +
+ )} {pendingLocal > 0 && {pendingLocal} local only} {ignoredSkills.length > 0 && ( {ignoredSkills.length} ignored @@ -234,9 +251,7 @@ export default function SyncPage() {
- - All targets are in sync! - + All targets are in sync!
{pendingLocal} local only {ignoredSkills.length > 0 && ( @@ -247,9 +262,7 @@ export default function SyncPage() {
- - All targets are in sync! - + All targets are in sync!
{ignoredSkills.length > 0 && ( {ignoredSkills.length} ignored @@ -257,6 +270,19 @@ export default function SyncPage() {
)} + {/* Scope selector */} + + {/* Sync split button */} handleSync()} @@ -267,7 +293,7 @@ export default function SyncPage() { dropdownAlign="right" items={[ { - label: 'Force Sync', + label: syncScope === 'agent' ? 'Force Sync Agents' : syncScope === 'skill' ? 'Force Sync Skills' : 'Force Sync', icon: , onClick: () => handleSync({ force: true }), confirm: true, @@ -280,7 +306,13 @@ export default function SyncPage() { ]} > {!syncing && } - {syncing ? 'Syncing...' : 'Sync Now'} + {syncing + ? 'Syncing...' + : syncScope === 'skill' + ? 'Sync Skills' + : syncScope === 'agent' + ? 'Sync Agents' + : 'Sync Now'}
From 568d2b7f649848a6e2ddd1097ae3b63b44904652 Mon Sep 17 00:00:00 2001 From: Willie Date: Thu, 9 Apr 2026 23:50:42 +0800 Subject: [PATCH 156/205] fix: address code review findings - Fix data race: read agentsSource inside RLock in handleDiffStream - Validate kind param in handleSync (reject invalid values) - Add test for symlink-points-elsewhere agent diff case - Always show KindBadge on DiffItemRow (default to 'skill') --- internal/server/handler_agent_diff_test.go | 30 ++++++++++++++++++++++ internal/server/handler_diff_stream.go | 2 +- internal/server/handler_sync.go | 5 ++++ ui/src/pages/SyncPage.tsx | 2 +- 4 files changed, 37 insertions(+), 2 deletions(-) diff --git a/internal/server/handler_agent_diff_test.go b/internal/server/handler_agent_diff_test.go index 3deefd9a..7f125109 100644 --- a/internal/server/handler_agent_diff_test.go +++ b/internal/server/handler_agent_diff_test.go @@ -56,6 +56,36 @@ func TestComputeAgentTargetDiff_LocalFile(t *testing.T) { } } +func TestComputeAgentTargetDiff_SymlinkPointsElsewhere(t *testing.T) { + sourceDir := t.TempDir() + otherDir := t.TempDir() + targetDir := t.TempDir() + + srcFile := filepath.Join(sourceDir, "agent.md") + os.WriteFile(srcFile, []byte("# Agent"), 0644) + otherFile := filepath.Join(otherDir, "agent.md") + os.WriteFile(otherFile, []byte("# Other"), 0644) + + // Symlink points to otherFile, not srcFile + os.Symlink(otherFile, filepath.Join(targetDir, "agent.md")) + + agents := []resource.DiscoveredResource{ + {FlatName: "agent.md", AbsPath: srcFile, RelPath: "agent.md"}, + } + + items := computeAgentTargetDiff(targetDir, agents) + + if len(items) != 1 { + t.Fatalf("expected 1 item, got %d", len(items)) + } + if items[0].Action != "update" { + t.Errorf("expected action 'update', got %q", items[0].Action) + } + if items[0].Reason != "symlink points elsewhere" { + t.Errorf("expected reason 'symlink points elsewhere', got %q", items[0].Reason) + } +} + func TestComputeAgentTargetDiff_InSync(t *testing.T) { sourceDir := t.TempDir() targetDir := t.TempDir() diff --git a/internal/server/handler_diff_stream.go b/internal/server/handler_diff_stream.go index 12f8c38a..84de9b8c 100644 --- a/internal/server/handler_diff_stream.go +++ b/internal/server/handler_diff_stream.go @@ -29,6 +29,7 @@ func (s *Server) handleDiffStream(w http.ResponseWriter, r *http.Request) { // Snapshot config under RLock, then release before slow I/O. s.mu.RLock() source := s.cfg.Source + agentsSource := s.agentsSource() globalMode := s.cfg.Mode targets := s.cloneTargets() s.mu.RUnlock() @@ -68,7 +69,6 @@ func (s *Server) handleDiffStream(w http.ResponseWriter, r *http.Request) { } // Agent diffs — discover agents and compute per-target diffs - agentsSource := s.agentsSource() var agents []resource.DiscoveredResource if agentsSource != "" { discovered, _ := resource.AgentKind{}.Discover(agentsSource) diff --git a/internal/server/handler_sync.go b/internal/server/handler_sync.go index b47c9837..b5b4cd5e 100644 --- a/internal/server/handler_sync.go +++ b/internal/server/handler_sync.go @@ -57,6 +57,11 @@ func (s *Server) handleSync(w http.ResponseWriter, r *http.Request) { // Default to non-dry-run, non-force, empty kind (both) } + if body.Kind != "" && body.Kind != "skill" && body.Kind != "agent" { + writeError(w, http.StatusBadRequest, "invalid kind: must be 'skill', 'agent', or empty") + return + } + globalMode := s.cfg.Mode if globalMode == "" { globalMode = "merge" diff --git a/ui/src/pages/SyncPage.tsx b/ui/src/pages/SyncPage.tsx index 7b700ac9..d2f101cd 100644 --- a/ui/src/pages/SyncPage.tsx +++ b/ui/src/pages/SyncPage.tsx @@ -573,7 +573,7 @@ function DiffItemRow({ item }: { item: { action: string; skill: string; reason?:
- {item.kind && } + {item.skill} From 294be5f08a1f360de6fcc76c8d042057e30d49c4 Mon Sep 17 00:00:00 2001 From: Willie Date: Thu, 9 Apr 2026 23:53:25 +0800 Subject: [PATCH 157/205] docs(ui): update sync page and API description for kind param - Sync page now has scope selector (Skills / Agents / Both) - POST /api/sync accepts kind param --- website/docs/reference/commands/ui.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/website/docs/reference/commands/ui.md b/website/docs/reference/commands/ui.md index e20d0bfd..16f2e6ca 100644 --- a/website/docs/reference/commands/ui.md +++ b/website/docs/reference/commands/ui.md @@ -63,7 +63,7 @@ skillshare ui --no-open & | **New Skill** | Step-by-step wizard to create a skill: Name → Pattern → Category → Scaffold → Confirm. Accessible via the **"+ New Skill"** button on the Skills page | | **Install** | Install from local path, git URL, or GitHub shorthand | | **Targets** | Target list with status badges. Add/remove targets | -| **Sync** | Sync controls with dry-run toggle. Diff preview | +| **Sync** | Sync controls with scope selector (Skills / Agents / Both), dry-run toggle, and diff preview with kind-separated summary | | **Collect** | Scan targets and collect selected skills back to source. Agent collect remains CLI-only. | | **Backup** | View backup list, restore snapshots, and clean up entries | | **Git Sync** | Push/pull source repo with dirty-state checks and force pull | @@ -123,7 +123,7 @@ The web dashboard exposes a REST API at `/api/`. All endpoints return JSON. | GET | `/api/targets` | List targets with status, include/exclude filters, and per-target expected counts | | POST | `/api/targets` | Add a target | | DELETE | `/api/targets/{name}` | Remove a target | -| POST | `/api/sync` | Run sync (supports `dryRun`, `force`) | +| POST | `/api/sync` | Run sync (supports `dryRun`, `force`, `kind`) | | GET | `/api/diff` | Diff between source and targets | | GET | `/api/search?q=` | Search GitHub for skills | | POST | `/api/install` | Install a skill from source | From d4bd0a66dffd4d2339df735c073b33b50eb644fa Mon Sep 17 00:00:00 2001 From: Willie Date: Thu, 9 Apr 2026 23:56:03 +0800 Subject: [PATCH 158/205] docs(sync,diff): fix agent sync description and add agent diff section - Fix sync.md: 'sync' defaults to skills only, not skills+agents - Fix --all flag description: syncs skills + agents + extras - Add 'sync agents' to usage examples - Add Agent Diff section to diff.md with usage examples --- website/docs/reference/commands/diff.md | 16 +++++++++++++++- website/docs/reference/commands/sync.md | 12 +++++++----- 2 files changed, 22 insertions(+), 6 deletions(-) diff --git a/website/docs/reference/commands/diff.md b/website/docs/reference/commands/diff.md index 214bed5d..d1244b71 100644 --- a/website/docs/reference/commands/diff.md +++ b/website/docs/reference/commands/diff.md @@ -9,9 +9,9 @@ Show differences between source and targets. ```bash skillshare diff # All targets (interactive TUI) skillshare diff claude # Specific target +skillshare diff agents # Agent targets only skillshare diff --stat # File-level changes skillshare diff --patch # Full unified diff -skillshare diff # Includes extras automatically ``` ![diff demo](/img/diff-demo.png) @@ -184,6 +184,20 @@ skillshare diff claude # See exactly what's different skillshare sync # Fix it ``` +## Agent Diff {#agent-diff} + +Use the `agents` keyword to diff only agent targets: + +```bash +skillshare diff agents # All agent-capable targets +skillshare diff agents claude # Specific target +skillshare diff agents --json # JSON output +``` + +Agent diff shows missing agents (need sync), orphan symlinks (need prune), and local-only agent files. Only targets with an `agents` path configuration are included. See [Agents — Supported Targets](/docs/understand/agents#supported-targets) for the full list. + +--- + ## Options | Flag | Description | diff --git a/website/docs/reference/commands/sync.md b/website/docs/reference/commands/sync.md index a16b5cab..8fdd7c8f 100644 --- a/website/docs/reference/commands/sync.md +++ b/website/docs/reference/commands/sync.md @@ -86,7 +86,8 @@ Push skills from source to all targets. ```bash skillshare sync # Sync skills to all targets -skillshare sync --all # Sync skills + extras +skillshare sync agents # Sync agents only +skillshare sync --all # Sync skills + agents + extras skillshare sync --dry-run # Preview changes skillshare sync -n # Short form skillshare sync --force # Overwrite all managed skills @@ -95,7 +96,7 @@ skillshare sync -f # Short form | Flag | Short | Description | |------|-------|-------------| -| `--all` | | Also sync extras after skills (see [sync extras](#sync-extras) below) | +| `--all` | | Also sync agents and extras after skills | | `--dry-run` | `-n` | Preview changes without writing | | `--force` | `-f` | Overwrite all managed entries regardless of checksum (copy mode) or replace existing directories with symlinks (merge mode) | | `--json` | | Output as JSON | @@ -481,11 +482,12 @@ flowchart TD ## Agent Sync {#agent-sync} -`skillshare sync` automatically syncs agents alongside skills to targets that support agents. You can also sync agents independently: +Agents are synced separately from skills. Use `sync agents` for agent-only sync, or `sync --all` to sync everything: ```bash -skillshare sync # Sync skills + agents to all targets -skillshare sync agents # Sync only agents +skillshare sync # Sync skills only (default) +skillshare sync agents # Sync agents only +skillshare sync --all # Sync skills + agents + extras ``` Agent sync supports all three modes (merge, copy, symlink), matching the target's configured mode. Only targets with an `agents` path definition receive agent syncs — currently Claude, Cursor, OpenCode, and Augment. See [Agents — Supported Targets](/docs/understand/agents#supported-targets) for the full list. From a73d55dc266ced12259b15f36584a4f6edfa0208 Mon Sep 17 00:00:00 2001 From: Willie Date: Fri, 10 Apr 2026 00:02:53 +0800 Subject: [PATCH 159/205] refactor: extract agent helpers and simplify duplicated patterns - Extract agent_helpers.go: kind constants, discoverActiveAgents(), resolveAgentPath(), builtinAgentTargets(), mergeAgentDiffItems() - Replace 3 inline agent path resolution blocks with resolveAgentPath() - Replace 3 inline agent discovery blocks with discoverActiveAgents() - Replace 2 inline diff merge blocks with mergeAgentDiffItems() - Fix handleSync prune triple-scan: capture index from merge pass - Frontend: replace 10 filter() calls with single useMemo reduce pass - Use kind constants instead of raw strings across all server files --- internal/server/agent_helpers.go | 57 ++++++++ internal/server/handler_agent_diff.go | 10 +- internal/server/handler_diff_stream.go | 91 ++++--------- internal/server/handler_sync.go | 172 ++++++++----------------- ui/src/pages/SyncPage.tsx | 54 ++++---- 5 files changed, 167 insertions(+), 217 deletions(-) create mode 100644 internal/server/agent_helpers.go diff --git a/internal/server/agent_helpers.go b/internal/server/agent_helpers.go new file mode 100644 index 00000000..bdc73d22 --- /dev/null +++ b/internal/server/agent_helpers.go @@ -0,0 +1,57 @@ +package server + +import ( + "skillshare/internal/config" + "skillshare/internal/resource" +) + +// Kind constants for diff/sync operations. +const ( + kindSkill = "skill" + kindAgent = "agent" +) + +// discoverActiveAgents discovers agents from the given source directory, +// returning only non-disabled agents. Returns nil if source is empty. +func discoverActiveAgents(agentsSource string) []resource.DiscoveredResource { + if agentsSource == "" { + return nil + } + discovered, _ := resource.AgentKind{}.Discover(agentsSource) + return resource.ActiveAgents(discovered) +} + +// resolveAgentPath returns the expanded agent target path for a target, +// checking user config first, then builtin defaults. Returns "" if no path. +func resolveAgentPath(target config.TargetConfig, builtinAgents map[string]config.TargetConfig, name string) string { + if ac := target.AgentsConfig(); ac.Path != "" { + return config.ExpandPath(ac.Path) + } + if builtin, ok := builtinAgents[name]; ok { + return config.ExpandPath(builtin.Path) + } + return "" +} + +// builtinAgentTargets returns the builtin agent target map for the server's mode. +func (s *Server) builtinAgentTargets() map[string]config.TargetConfig { + if s.IsProjectMode() { + return config.ProjectAgentTargets() + } + return config.DefaultAgentTargets() +} + +// mergeAgentDiffItems appends agent diff items into the existing diffs slice, +// merging with an existing target entry or creating a new one. +func mergeAgentDiffItems(diffs []diffTarget, name string, items []diffItem) []diffTarget { + for i := range diffs { + if diffs[i].Target == name { + diffs[i].Items = append(diffs[i].Items, items...) + return diffs + } + } + return append(diffs, diffTarget{ + Target: name, + Items: items, + }) +} diff --git a/internal/server/handler_agent_diff.go b/internal/server/handler_agent_diff.go index de2d1322..efbdabe5 100644 --- a/internal/server/handler_agent_diff.go +++ b/internal/server/handler_agent_diff.go @@ -41,7 +41,7 @@ func computeAgentTargetDiff(targetDir string, agents []resource.DiscoveredResour Skill: flatName, Action: "link", Reason: "source only", - Kind: "agent", + Kind: kindAgent, }) continue } @@ -54,7 +54,7 @@ func computeAgentTargetDiff(targetDir string, agents []resource.DiscoveredResour Skill: flatName, Action: "update", Reason: "link target unreadable", - Kind: "agent", + Kind: kindAgent, }) continue } @@ -64,7 +64,7 @@ func computeAgentTargetDiff(targetDir string, agents []resource.DiscoveredResour Skill: flatName, Action: "update", Reason: "symlink points elsewhere", - Kind: "agent", + Kind: kindAgent, }) } // else: in sync, no item emitted @@ -82,14 +82,14 @@ func computeAgentTargetDiff(targetDir string, agents []resource.DiscoveredResour Skill: name, Action: "prune", Reason: "orphan symlink", - Kind: "agent", + Kind: kindAgent, }) } else { items = append(items, diffItem{ Skill: name, Action: "local", Reason: "local file", - Kind: "agent", + Kind: kindAgent, }) } } diff --git a/internal/server/handler_diff_stream.go b/internal/server/handler_diff_stream.go index 84de9b8c..3b5da473 100644 --- a/internal/server/handler_diff_stream.go +++ b/internal/server/handler_diff_stream.go @@ -7,7 +7,6 @@ import ( "path/filepath" "skillshare/internal/config" - "skillshare/internal/resource" ssync "skillshare/internal/sync" "skillshare/internal/utils" ) @@ -68,59 +67,21 @@ func (s *Server) handleDiffStream(w http.ResponseWriter, r *http.Request) { }) } - // Agent diffs — discover agents and compute per-target diffs - var agents []resource.DiscoveredResource - if agentsSource != "" { - discovered, _ := resource.AgentKind{}.Discover(agentsSource) - agents = resource.ActiveAgents(discovered) - } - - if len(agents) > 0 { - var builtinAgents map[string]config.TargetConfig - if s.IsProjectMode() { - builtinAgents = config.ProjectAgentTargets() - } else { - builtinAgents = config.DefaultAgentTargets() - } - + // Agent diffs + if agents := discoverActiveAgents(agentsSource); len(agents) > 0 { + builtinAgents := s.builtinAgentTargets() for name, target := range targets { select { case <-ctx.Done(): return default: } - - ac := target.AgentsConfig() - agentPath := ac.Path + agentPath := resolveAgentPath(target, builtinAgents, name) if agentPath == "" { - if builtin, ok := builtinAgents[name]; ok { - agentPath = builtin.Path - } - } - if agentPath == "" { - continue - } - agentPath = config.ExpandPath(agentPath) - - agentItems := computeAgentTargetDiff(agentPath, agents) - if len(agentItems) == 0 { continue } - - // Merge into existing diff for this target - merged := false - for i := range diffs { - if diffs[i].Target == name { - diffs[i].Items = append(diffs[i].Items, agentItems...) - merged = true - break - } - } - if !merged { - diffs = append(diffs, diffTarget{ - Target: name, - Items: agentItems, - }) + if items := computeAgentTargetDiff(agentPath, agents); len(items) > 0 { + diffs = mergeAgentDiffItems(diffs, name, items) } } } @@ -144,7 +105,7 @@ func (s *Server) computeTargetDiff(name string, target config.TargetConfig, disc if mode == "symlink" { status := ssync.CheckStatus(sc.Path, source) if status != ssync.StatusLinked { - dt.Items = append(dt.Items, diffItem{Skill: "(entire directory)", Action: "link", Reason: "source only", Kind: "skill"}) + dt.Items = append(dt.Items, diffItem{Skill: "(entire directory)", Action: "link", Reason: "source only", Kind: kindSkill}) } return dt } @@ -159,7 +120,7 @@ func (s *Server) computeTargetDiff(name string, target config.TargetConfig, disc TargetNaming: sc.TargetNaming, }, filtered) if err != nil { - dt.Items = append(dt.Items, diffItem{Skill: "(target naming)", Action: "skip", Reason: err.Error(), Kind: "skill"}) + dt.Items = append(dt.Items, diffItem{Skill: "(target naming)", Action: "skip", Reason: err.Error(), Kind: kindSkill}) return dt } // Surface collision/validation stats so the UI can show why skills were skipped @@ -177,23 +138,23 @@ func (s *Server) computeTargetDiff(name string, target config.TargetConfig, disc if !isManaged { if info, statErr := os.Stat(targetSkillPath); statErr == nil { if info.IsDir() { - dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "skip", Reason: "local copy (sync --force to replace)", Kind: "skill"}) + dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "skip", Reason: "local copy (sync --force to replace)", Kind: kindSkill}) } else { - dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "update", Reason: "target entry is not a directory", Kind: "skill"}) + dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "update", Reason: "target entry is not a directory", Kind: kindSkill}) } } else if os.IsNotExist(statErr) { - dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "link", Reason: "source only", Kind: "skill"}) + dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "link", Reason: "source only", Kind: kindSkill}) } else { - dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "update", Reason: "cannot access target entry", Kind: "skill"}) + dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "update", Reason: "cannot access target entry", Kind: kindSkill}) } } else { targetInfo, statErr := os.Stat(targetSkillPath) if os.IsNotExist(statErr) { - dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "link", Reason: "missing (deleted from target)", Kind: "skill"}) + dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "link", Reason: "missing (deleted from target)", Kind: kindSkill}) } else if statErr != nil { - dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "update", Reason: "cannot access target entry", Kind: "skill"}) + dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "update", Reason: "cannot access target entry", Kind: kindSkill}) } else if !targetInfo.IsDir() { - dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "update", Reason: "target entry is not a directory", Kind: "skill"}) + dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "update", Reason: "target entry is not a directory", Kind: kindSkill}) } else { oldMtime := manifest.Mtimes[resolved.TargetName] currentMtime, mtimeErr := ssync.DirMaxMtime(skill.SourcePath) @@ -202,9 +163,9 @@ func (s *Server) computeTargetDiff(name string, target config.TargetConfig, disc } srcChecksum, checksumErr := ssync.DirChecksum(skill.SourcePath) if checksumErr != nil { - dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "update", Reason: "cannot compute checksum", Kind: "skill"}) + dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "update", Reason: "cannot compute checksum", Kind: kindSkill}) } else if srcChecksum != oldChecksum { - dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "update", Reason: "content changed", Kind: "skill"}) + dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "update", Reason: "content changed", Kind: kindSkill}) } } } @@ -214,7 +175,7 @@ func (s *Server) computeTargetDiff(name string, target config.TargetConfig, disc continue } if !validNames[managedName] { - dt.Items = append(dt.Items, diffItem{Skill: managedName, Action: "prune", Reason: "orphan copy", Kind: "skill"}) + dt.Items = append(dt.Items, diffItem{Skill: managedName, Action: "prune", Reason: "orphan copy", Kind: kindSkill}) } } return dt @@ -227,7 +188,7 @@ func (s *Server) computeTargetDiff(name string, target config.TargetConfig, disc _, err := os.Lstat(targetSkillPath) if err != nil { if os.IsNotExist(err) { - dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "link", Reason: "source only", Kind: "skill"}) + dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "link", Reason: "source only", Kind: kindSkill}) } continue } @@ -235,15 +196,15 @@ func (s *Server) computeTargetDiff(name string, target config.TargetConfig, disc if utils.IsSymlinkOrJunction(targetSkillPath) { absLink, linkErr := utils.ResolveLinkTarget(targetSkillPath) if linkErr != nil { - dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "update", Reason: "link target unreadable", Kind: "skill"}) + dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "update", Reason: "link target unreadable", Kind: kindSkill}) continue } absSource, _ := filepath.Abs(skill.SourcePath) if !utils.PathsEqual(absLink, absSource) { - dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "update", Reason: "symlink points elsewhere", Kind: "skill"}) + dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "update", Reason: "symlink points elsewhere", Kind: kindSkill}) } } else { - dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "skip", Reason: "local copy (sync --force to replace)", Kind: "skill"}) + dt.Items = append(dt.Items, diffItem{Skill: resolved.TargetName, Action: "skip", Reason: "local copy (sync --force to replace)", Kind: kindSkill}) } } @@ -271,16 +232,16 @@ func (s *Server) computeTargetDiff(name string, target config.TargetConfig, disc } absSource, _ := filepath.Abs(source) if utils.PathHasPrefix(absLink, absSource+string(filepath.Separator)) { - dt.Items = append(dt.Items, diffItem{Skill: eName, Action: "prune", Reason: "orphan symlink", Kind: "skill"}) + dt.Items = append(dt.Items, diffItem{Skill: eName, Action: "prune", Reason: "orphan symlink", Kind: kindSkill}) } } else if info.IsDir() { if _, inManifest := manifest.Managed[eName]; inManifest { - dt.Items = append(dt.Items, diffItem{Skill: eName, Action: "prune", Reason: "orphan managed directory (manifest)", Kind: "skill"}) + dt.Items = append(dt.Items, diffItem{Skill: eName, Action: "prune", Reason: "orphan managed directory (manifest)", Kind: kindSkill}) } else { if resolution.Naming == "flat" && (utils.HasNestedSeparator(eName) || utils.IsTrackedRepoDir(eName)) { - dt.Items = append(dt.Items, diffItem{Skill: eName, Action: "prune", Reason: "orphan managed directory", Kind: "skill"}) + dt.Items = append(dt.Items, diffItem{Skill: eName, Action: "prune", Reason: "orphan managed directory", Kind: kindSkill}) } else { - dt.Items = append(dt.Items, diffItem{Skill: eName, Action: "local", Reason: "local only", Kind: "skill"}) + dt.Items = append(dt.Items, diffItem{Skill: eName, Action: "local", Reason: "local only", Kind: kindSkill}) } } } diff --git a/internal/server/handler_sync.go b/internal/server/handler_sync.go index b5b4cd5e..c7f63ff8 100644 --- a/internal/server/handler_sync.go +++ b/internal/server/handler_sync.go @@ -7,7 +7,6 @@ import ( "time" "skillshare/internal/config" - "skillshare/internal/resource" "skillshare/internal/skillignore" ssync "skillshare/internal/sync" ) @@ -57,7 +56,7 @@ func (s *Server) handleSync(w http.ResponseWriter, r *http.Request) { // Default to non-dry-run, non-force, empty kind (both) } - if body.Kind != "" && body.Kind != "skill" && body.Kind != "agent" { + if body.Kind != "" && body.Kind != kindSkill && body.Kind != kindAgent { writeError(w, http.StatusBadRequest, "invalid kind: must be 'skill', 'agent', or empty") return } @@ -79,7 +78,7 @@ func (s *Server) handleSync(w http.ResponseWriter, r *http.Request) { var ignoreStats *skillignore.IgnoreStats // Skill sync (skip when kind == "agent") - if body.Kind != "agent" { + if body.Kind != kindAgent { var allSkills []ssync.DiscoveredSkill var err error allSkills, ignoreStats, err = ssync.DiscoverSourceSkillsWithStats(s.cfg.Source) @@ -174,83 +173,60 @@ func (s *Server) handleSync(w http.ResponseWriter, r *http.Request) { } // Agent sync (skip when kind == "skill") - if body.Kind != "skill" { + if body.Kind != kindSkill { agentsSource := s.agentsSource() - if agentsSource != "" { - agentDiscovered, _ := resource.AgentKind{}.Discover(agentsSource) - agents := resource.ActiveAgents(agentDiscovered) - - if len(agents) > 0 { - var builtinAgents map[string]config.TargetConfig - if s.IsProjectMode() { - builtinAgents = config.ProjectAgentTargets() - } else { - builtinAgents = config.DefaultAgentTargets() - } + if agents := discoverActiveAgents(agentsSource); len(agents) > 0 { + builtinAgents := s.builtinAgentTargets() - for name, target := range s.cfg.Targets { - ac := target.AgentsConfig() - agentPath := ac.Path - if agentPath == "" { - if builtin, ok := builtinAgents[name]; ok { - agentPath = builtin.Path - } - } - if agentPath == "" { - continue - } - agentPath = config.ExpandPath(agentPath) + for name, target := range s.cfg.Targets { + agentPath := resolveAgentPath(target, builtinAgents, name) + if agentPath == "" { + continue + } - agentMode := ac.Mode - if agentMode == "" { - agentMode = "merge" - } + agentMode := target.AgentsConfig().Mode + if agentMode == "" { + agentMode = "merge" + } - agentResult, err := ssync.SyncAgents(agents, agentsSource, agentPath, agentMode, body.DryRun, body.Force) - if err != nil { - warnings = append(warnings, "agent sync failed for "+name+": "+err.Error()) - continue - } + agentResult, err := ssync.SyncAgents(agents, agentsSource, agentPath, agentMode, body.DryRun, body.Force) + if err != nil { + warnings = append(warnings, "agent sync failed for "+name+": "+err.Error()) + continue + } - // Merge into existing result or create new - merged := false - for i := range results { - if results[i].Target == name { - results[i].Linked = append(results[i].Linked, agentResult.Linked...) - results[i].Updated = append(results[i].Updated, agentResult.Updated...) - results[i].Skipped = append(results[i].Skipped, agentResult.Skipped...) - merged = true - break - } - } - if !merged && (len(agentResult.Linked) > 0 || len(agentResult.Updated) > 0 || len(agentResult.Skipped) > 0) { - results = append(results, syncTargetResult{ - Target: name, - Linked: agentResult.Linked, - Updated: agentResult.Updated, - Skipped: agentResult.Skipped, - Pruned: make([]string, 0), - }) + // Find or create result entry for this target + idx := -1 + for i := range results { + if results[i].Target == name { + idx = i + break } + } + if idx >= 0 { + results[idx].Linked = append(results[idx].Linked, agentResult.Linked...) + results[idx].Updated = append(results[idx].Updated, agentResult.Updated...) + results[idx].Skipped = append(results[idx].Skipped, agentResult.Skipped...) + } else if len(agentResult.Linked) > 0 || len(agentResult.Updated) > 0 || len(agentResult.Skipped) > 0 { + results = append(results, syncTargetResult{ + Target: name, + Linked: agentResult.Linked, + Updated: agentResult.Updated, + Skipped: agentResult.Skipped, + Pruned: make([]string, 0), + }) + idx = len(results) - 1 + } - // Prune orphan agents - if agentMode == "merge" { - pruned, _ := ssync.PruneOrphanAgentLinks(agentPath, agents, body.DryRun) - for i := range results { - if results[i].Target == name { - results[i].Pruned = append(results[i].Pruned, pruned...) - break - } - } - } else if agentMode == "copy" { - pruned, _ := ssync.PruneOrphanAgentCopies(agentPath, agents, body.DryRun) - for i := range results { - if results[i].Target == name { - results[i].Pruned = append(results[i].Pruned, pruned...) - break - } - } - } + // Prune orphan agents — reuse idx to avoid re-scanning + var pruned []string + if agentMode == "merge" { + pruned, _ = ssync.PruneOrphanAgentLinks(agentPath, agents, body.DryRun) + } else if agentMode == "copy" { + pruned, _ = ssync.PruneOrphanAgentCopies(agentPath, agents, body.DryRun) + } + if idx >= 0 && len(pruned) > 0 { + results[idx].Pruned = append(results[idx].Pruned, pruned...) } } } @@ -317,57 +293,19 @@ func (s *Server) handleDiff(w http.ResponseWriter, r *http.Request) { diffs = append(diffs, s.computeTargetDiff(name, target, discovered, globalMode, source)) } - // Agent diffs — discover agents and compute per-target diffs - var agents []resource.DiscoveredResource - if agentsSource != "" { - discovered, _ := resource.AgentKind{}.Discover(agentsSource) - agents = resource.ActiveAgents(discovered) - } - - if len(agents) > 0 { - var builtinAgents map[string]config.TargetConfig - if s.IsProjectMode() { - builtinAgents = config.ProjectAgentTargets() - } else { - builtinAgents = config.DefaultAgentTargets() - } - + // Agent diffs + if agents := discoverActiveAgents(agentsSource); len(agents) > 0 { + builtinAgents := s.builtinAgentTargets() for name, target := range targets { if filterTarget != "" && filterTarget != name { continue } - - ac := target.AgentsConfig() - agentPath := ac.Path - if agentPath == "" { - if builtin, ok := builtinAgents[name]; ok { - agentPath = builtin.Path - } - } + agentPath := resolveAgentPath(target, builtinAgents, name) if agentPath == "" { continue } - agentPath = config.ExpandPath(agentPath) - - agentItems := computeAgentTargetDiff(agentPath, agents) - if len(agentItems) == 0 { - continue - } - - // Merge into existing diff for this target - merged := false - for i := range diffs { - if diffs[i].Target == name { - diffs[i].Items = append(diffs[i].Items, agentItems...) - merged = true - break - } - } - if !merged { - diffs = append(diffs, diffTarget{ - Target: name, - Items: agentItems, - }) + if items := computeAgentTargetDiff(agentPath, agents); len(items) > 0 { + diffs = mergeAgentDiffItems(diffs, name, items) } } } diff --git a/ui/src/pages/SyncPage.tsx b/ui/src/pages/SyncPage.tsx index d2f101cd..d20c73fb 100644 --- a/ui/src/pages/SyncPage.tsx +++ b/ui/src/pages/SyncPage.tsx @@ -122,30 +122,24 @@ export default function SyncPage() { // Derived ignored skills list const ignoredSkills = ignoreSources?.ignored_skills ?? []; - // Calculate diff summary by kind + // Calculate diff summary by kind (single pass) const diffs = diffData ?? []; - const allItems = diffs.flatMap((d) => d.items ?? []); - - const countByKindAction = (kind: string, action: string) => - allItems.filter((i) => (i.kind ?? 'skill') === kind && i.action === action).length; - - const skillLinks = countByKindAction('skill', 'link'); - const skillUpdates = countByKindAction('skill', 'update'); - const skillPrunes = countByKindAction('skill', 'prune'); - const skillSkips = countByKindAction('skill', 'skip'); - const skillLocal = countByKindAction('skill', 'local'); - const skillSync = skillLinks + skillUpdates + skillPrunes + skillSkips; - - const agentLinks = countByKindAction('agent', 'link'); - const agentUpdates = countByKindAction('agent', 'update'); - const agentPrunes = countByKindAction('agent', 'prune'); - const agentSkips = countByKindAction('agent', 'skip'); - const agentLocal = countByKindAction('agent', 'local'); - const agentSync = agentLinks + agentUpdates + agentPrunes + agentSkips; + const counts = useMemo(() => { + const c = { skill: { link: 0, update: 0, prune: 0, skip: 0, local: 0 }, agent: { link: 0, update: 0, prune: 0, skip: 0, local: 0 } }; + for (const d of diffs) { + for (const i of d.items ?? []) { + const kind = (i.kind ?? 'skill') as 'skill' | 'agent'; + const action = i.action as keyof typeof c.skill; + if (c[kind] && action in c[kind]) c[kind][action]++; + } + } + return c; + }, [diffs]); - const totalActions = allItems.length; - const pendingLocal = skillLocal + agentLocal; - const syncActions = totalActions - pendingLocal; + const skillSync = counts.skill.link + counts.skill.update + counts.skill.prune + counts.skill.skip; + const agentSync = counts.agent.link + counts.agent.update + counts.agent.prune + counts.agent.skip; + const pendingLocal = counts.skill.local + counts.agent.local; + const syncActions = skillSync + agentSync; return (
@@ -227,19 +221,19 @@ export default function SyncPage() { {skillSync > 0 && (
- {skillLinks > 0 && {skillLinks} to link} - {skillUpdates > 0 && {skillUpdates} to update} - {skillSkips > 0 && {skillSkips} skipped} - {skillPrunes > 0 && {skillPrunes} to prune} + {counts.skill.link > 0 && {counts.skill.link} to link} + {counts.skill.update > 0 && {counts.skill.update} to update} + {counts.skill.skip > 0 && {counts.skill.skip} skipped} + {counts.skill.prune > 0 && {counts.skill.prune} to prune}
)} {agentSync > 0 && (
- {agentLinks > 0 && {agentLinks} to link} - {agentUpdates > 0 && {agentUpdates} to update} - {agentSkips > 0 && {agentSkips} skipped} - {agentPrunes > 0 && {agentPrunes} to prune} + {counts.agent.link > 0 && {counts.agent.link} to link} + {counts.agent.update > 0 && {counts.agent.update} to update} + {counts.agent.skip > 0 && {counts.agent.skip} skipped} + {counts.agent.prune > 0 && {counts.agent.prune} to prune}
)} {pendingLocal > 0 && {pendingLocal} local only} From 4834e4580d36ad37921001897de4f6f567dc7acb Mon Sep 17 00:00:00 2001 From: Willie Date: Fri, 10 Apr 2026 00:17:41 +0800 Subject: [PATCH 160/205] feat(server): extend collect scan to support kind=agent parameter handleCollectScan now accepts ?kind=skill|agent query parameter. When kind=agent, scans for local agent files in agent target dirs. When kind is empty, scans for both skills and agents. --- internal/server/handler_collect.go | 109 ++++++++++++++----- internal/server/handler_collect_test.go | 137 ++++++++++++++++++++++++ 2 files changed, 220 insertions(+), 26 deletions(-) diff --git a/internal/server/handler_collect.go b/internal/server/handler_collect.go index 87220ec6..9bd78a91 100644 --- a/internal/server/handler_collect.go +++ b/internal/server/handler_collect.go @@ -33,50 +33,107 @@ type collectSkillRef struct { TargetName string `json:"targetName"` } -// handleCollectScan scans targets for local (non-symlinked) skills. -// GET /api/collect/scan?target= (optional filter) +// handleCollectScan scans targets for local (non-symlinked) skills and/or agents. +// GET /api/collect/scan?target=&kind=skill|agent (both optional) +// When kind is omitted, scans for both skills and agents. func (s *Server) handleCollectScan(w http.ResponseWriter, r *http.Request) { // Snapshot config under RLock, then release before I/O. s.mu.RLock() source := s.cfg.Source globalMode := s.cfg.Mode targets := s.cloneTargets() + agentsSource := s.agentsSource() s.mu.RUnlock() filterTarget := r.URL.Query().Get("target") + kind := r.URL.Query().Get("kind") + if kind != "" && kind != kindSkill && kind != kindAgent { + writeError(w, http.StatusBadRequest, "invalid kind: must be 'skill', 'agent', or empty") + return + } - var scanTargets []scanTarget + // Collect items per target, merging skills and agents. + targetItems := make(map[string][]localSkillItem) totalCount := 0 - for name, target := range targets { - if filterTarget != "" && filterTarget != name { - continue + // --- Skill scan --- + if kind != kindAgent { + for name, target := range targets { + if filterTarget != "" && filterTarget != name { + continue + } + + sc := target.SkillsConfig() + mode := ssync.EffectiveMode(sc.Mode) + if sc.Mode == "" && globalMode != "" { + mode = globalMode + } + locals, err := ssync.FindLocalSkills(sc.Path, source, mode) + if err != nil { + writeError(w, http.StatusInternalServerError, "scan failed for "+name+": "+err.Error()) + return + } + + for _, sk := range locals { + targetItems[name] = append(targetItems[name], localSkillItem{ + Name: sk.Name, + Kind: kindSkill, + Path: sk.Path, + TargetName: name, + Size: ssync.CalculateDirSize(sk.Path), + ModTime: sk.ModTime.Format(time.RFC3339), + }) + } } + } - sc := target.SkillsConfig() - mode := ssync.EffectiveMode(sc.Mode) - if sc.Mode == "" && globalMode != "" { - mode = globalMode - } - locals, err := ssync.FindLocalSkills(sc.Path, source, mode) - if err != nil { - writeError(w, http.StatusInternalServerError, "scan failed for "+name+": "+err.Error()) - return + // --- Agent scan --- + if kind != kindSkill { + builtinAgents := s.builtinAgentTargets() + for name, target := range targets { + if filterTarget != "" && filterTarget != name { + continue + } + agentPath := resolveAgentPath(target, builtinAgents, name) + if agentPath == "" || agentsSource == "" { + continue + } + localAgents, err := ssync.FindLocalAgents(agentPath, agentsSource) + if err != nil { + writeError(w, http.StatusInternalServerError, "agent scan failed for "+name+": "+err.Error()) + return + } + for _, ag := range localAgents { + var size int64 + var modTime string + if info, err := os.Stat(ag.Path); err == nil { + size = info.Size() + modTime = info.ModTime().Format(time.RFC3339) + } + targetItems[name] = append(targetItems[name], localSkillItem{ + Name: ag.Name, + Kind: kindAgent, + Path: ag.Path, + TargetName: name, + Size: size, + ModTime: modTime, + }) + } } + } - items := make([]localSkillItem, 0, len(locals)) - for _, sk := range locals { - items = append(items, localSkillItem{ - Name: sk.Name, - Kind: "skill", - Path: sk.Path, - TargetName: name, - Size: ssync.CalculateDirSize(sk.Path), - ModTime: sk.ModTime.Format(time.RFC3339), - }) + // Build response from merged map. + var scanTargets []scanTarget + for name := range targets { + items := targetItems[name] + if len(items) == 0 && kind != "" { + // When filtering by kind, skip targets with no items of that kind. + continue } - totalCount += len(items) + if items == nil { + items = []localSkillItem{} + } scanTargets = append(scanTargets, scanTarget{ TargetName: name, Skills: items, diff --git a/internal/server/handler_collect_test.go b/internal/server/handler_collect_test.go index 19404bb9..bdbfbe3f 100644 --- a/internal/server/handler_collect_test.go +++ b/internal/server/handler_collect_test.go @@ -128,3 +128,140 @@ func TestHandleCollectScan_GlobalCopyModeInheritedTarget_SkipsManaged(t *testing t.Fatalf("expected only local-skill, got %q", resp.Targets[0].Skills[0].Name) } } + +func TestHandleCollectScan_AgentKind(t *testing.T) { + tgtPath := filepath.Join(t.TempDir(), "claude-skills") + agentPath := filepath.Join(t.TempDir(), "claude-agents") + agentsSource := filepath.Join(t.TempDir(), "agents-source") + s, sourceDir := newTestServerWithTargets(t, map[string]string{"claude": tgtPath}) + + // Write config YAML with agents_source and agent target path. + // The auto-reload middleware re-reads from disk on every API request. + cfgPath := os.Getenv("SKILLSHARE_CONFIG") + raw := "source: " + sourceDir + "\nagents_source: " + agentsSource + + "\nmode: merge\ntargets:\n claude:\n skills:\n path: " + tgtPath + + "\n agents:\n path: " + agentPath + "\n" + if err := os.WriteFile(cfgPath, []byte(raw), 0644); err != nil { + t.Fatalf("failed to update config: %v", err) + } + os.MkdirAll(agentsSource, 0755) + + // Create a local .md agent file in the agent target directory. + os.MkdirAll(agentPath, 0755) + os.WriteFile(filepath.Join(agentPath, "helper.md"), []byte("# helper agent"), 0644) + + req := httptest.NewRequest(http.MethodGet, "/api/collect/scan?kind=agent", nil) + rr := httptest.NewRecorder() + s.handler.ServeHTTP(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("expected 200, got %d: %s", rr.Code, rr.Body.String()) + } + + var resp struct { + Targets []struct { + TargetName string `json:"targetName"` + Skills []struct { + Name string `json:"name"` + Kind string `json:"kind"` + } `json:"skills"` + } `json:"targets"` + TotalCount int `json:"totalCount"` + } + if err := json.Unmarshal(rr.Body.Bytes(), &resp); err != nil { + t.Fatalf("failed to decode response: %v", err) + } + + if resp.TotalCount != 1 { + t.Fatalf("expected totalCount=1, got %d", resp.TotalCount) + } + if len(resp.Targets) == 0 { + t.Fatal("expected at least 1 target in response") + } + found := false + for _, tgt := range resp.Targets { + for _, sk := range tgt.Skills { + if sk.Name == "helper.md" && sk.Kind == "agent" { + found = true + } + } + } + if !found { + t.Fatalf("expected agent helper.md with kind=agent in response, got %+v", resp.Targets) + } +} + +func TestHandleCollectScan_BothKinds(t *testing.T) { + tgtPath := filepath.Join(t.TempDir(), "claude-skills") + agentPath := filepath.Join(t.TempDir(), "claude-agents") + agentsSource := filepath.Join(t.TempDir(), "agents-source") + s, sourceDir := newTestServerWithTargets(t, map[string]string{"claude": tgtPath}) + + // Write config YAML with both skills and agents paths. + cfgPath := os.Getenv("SKILLSHARE_CONFIG") + raw := "source: " + sourceDir + "\nagents_source: " + agentsSource + + "\nmode: merge\ntargets:\n claude:\n skills:\n path: " + tgtPath + + "\n agents:\n path: " + agentPath + "\n" + if err := os.WriteFile(cfgPath, []byte(raw), 0644); err != nil { + t.Fatalf("failed to update config: %v", err) + } + os.MkdirAll(agentsSource, 0755) + + // Create a local skill in skill target. + localSkill := filepath.Join(tgtPath, "local-skill") + os.MkdirAll(localSkill, 0755) + os.WriteFile(filepath.Join(localSkill, "SKILL.md"), []byte("local"), 0644) + + // Create a local agent in agent target. + os.MkdirAll(agentPath, 0755) + os.WriteFile(filepath.Join(agentPath, "reviewer.md"), []byte("# reviewer agent"), 0644) + + // No kind parameter — should return both. + req := httptest.NewRequest(http.MethodGet, "/api/collect/scan", nil) + rr := httptest.NewRecorder() + s.handler.ServeHTTP(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("expected 200, got %d: %s", rr.Code, rr.Body.String()) + } + + var resp struct { + TotalCount int `json:"totalCount"` + } + if err := json.Unmarshal(rr.Body.Bytes(), &resp); err != nil { + t.Fatalf("failed to decode response: %v", err) + } + if resp.TotalCount != 2 { + t.Fatalf("expected totalCount=2 (1 skill + 1 agent), got %d", resp.TotalCount) + } +} + +func TestHandleCollectScan_AgentKind_NoSource(t *testing.T) { + tgtPath := filepath.Join(t.TempDir(), "claude-skills") + s, sourceDir := newTestServerWithTargets(t, map[string]string{"claude": tgtPath}) + + // Write config without agents_source — should return 0 agents, no error. + cfgPath := os.Getenv("SKILLSHARE_CONFIG") + raw := "source: " + sourceDir + "\nmode: merge\ntargets:\n claude:\n path: " + tgtPath + "\n" + if err := os.WriteFile(cfgPath, []byte(raw), 0644); err != nil { + t.Fatalf("failed to update config: %v", err) + } + + req := httptest.NewRequest(http.MethodGet, "/api/collect/scan?kind=agent", nil) + rr := httptest.NewRecorder() + s.handler.ServeHTTP(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("expected 200, got %d: %s", rr.Code, rr.Body.String()) + } + + var resp struct { + TotalCount int `json:"totalCount"` + } + if err := json.Unmarshal(rr.Body.Bytes(), &resp); err != nil { + t.Fatalf("failed to decode response: %v", err) + } + if resp.TotalCount != 0 { + t.Fatalf("expected totalCount=0 when no agents source, got %d", resp.TotalCount) + } +} From 4b37f1f21d280538ef9ed40abe02568ebc99948d Mon Sep 17 00:00:00 2001 From: Willie Date: Fri, 10 Apr 2026 00:23:44 +0800 Subject: [PATCH 161/205] feat(server): extend collect endpoint to pull agents via kind field collectSkillRef now has optional kind field. Items with kind=agent are resolved against agent target paths and pulled via PullAgents(). Results from skills and agents are merged into a single response. Oplog tracks skill and agent counts separately for observability. --- internal/server/handler_collect.go | 155 ++++++++++++++++++------ internal/server/handler_collect_test.go | 119 ++++++++++++++++++ 2 files changed, 234 insertions(+), 40 deletions(-) diff --git a/internal/server/handler_collect.go b/internal/server/handler_collect.go index 9bd78a91..78cfc960 100644 --- a/internal/server/handler_collect.go +++ b/internal/server/handler_collect.go @@ -2,6 +2,7 @@ package server import ( "encoding/json" + "maps" "net/http" "os" "path/filepath" @@ -31,6 +32,7 @@ type scanTarget struct { type collectSkillRef struct { Name string `json:"name"` TargetName string `json:"targetName"` + Kind string `json:"kind,omitempty"` } // handleCollectScan scans targets for local (non-symlinked) skills and/or agents. @@ -150,8 +152,9 @@ func (s *Server) handleCollectScan(w http.ResponseWriter, r *http.Request) { }) } -// handleCollect pulls selected local skills from targets to source. -// POST /api/collect { skills: [{name, targetName}], force: bool } +// handleCollect pulls selected local skills and/or agents from targets to source. +// POST /api/collect { skills: [{name, targetName, kind?}], force: bool } +// Items with kind="agent" are pulled as agents; others as skills (backward compat). func (s *Server) handleCollect(w http.ResponseWriter, r *http.Request) { start := time.Now() s.mu.Lock() @@ -171,69 +174,141 @@ func (s *Server) handleCollect(w http.ResponseWriter, r *http.Request) { return } - // Resolve each skill ref to a LocalSkillInfo - var resolved []ssync.LocalSkillInfo + // Split items by kind. + var skillRefs, agentRefs []collectSkillRef for _, ref := range body.Skills { - target, ok := s.cfg.Targets[ref.TargetName] - if !ok { - writeError(w, http.StatusBadRequest, "unknown target: "+ref.TargetName) - return + if ref.Kind == kindAgent { + agentRefs = append(agentRefs, ref) + } else { + skillRefs = append(skillRefs, ref) + } + } + + opts := ssync.PullOptions{Force: body.Force} + + // Merged results across skills and agents. + var allPulled, allSkipped []string + allFailed := make(map[string]error) + var skillsPulled, agentsPulled int + + // --- Pull skills --- + if len(skillRefs) > 0 { + var resolved []ssync.LocalSkillInfo + for _, ref := range skillRefs { + target, ok := s.cfg.Targets[ref.TargetName] + if !ok { + writeError(w, http.StatusBadRequest, "unknown target: "+ref.TargetName) + return + } + + skillPath := filepath.Join(target.SkillsConfig().Path, ref.Name) + info, err := os.Lstat(skillPath) + if err != nil { + writeError(w, http.StatusBadRequest, "skill not found: "+ref.Name+" in "+ref.TargetName) + return + } + if info.Mode()&os.ModeSymlink != 0 { + writeError(w, http.StatusBadRequest, "skill is a symlink (not local): "+ref.Name) + return + } + if !info.IsDir() { + writeError(w, http.StatusBadRequest, "skill is not a directory: "+ref.Name) + return + } + + resolved = append(resolved, ssync.LocalSkillInfo{ + Name: ref.Name, + Path: skillPath, + TargetName: ref.TargetName, + }) } - skillPath := filepath.Join(target.SkillsConfig().Path, ref.Name) - info, err := os.Lstat(skillPath) + result, err := ssync.PullSkills(resolved, s.cfg.Source, opts) if err != nil { - writeError(w, http.StatusBadRequest, "skill not found: "+ref.Name+" in "+ref.TargetName) + writeError(w, http.StatusInternalServerError, "collect failed: "+err.Error()) return } - if info.Mode()&os.ModeSymlink != 0 { - writeError(w, http.StatusBadRequest, "skill is a symlink (not local): "+ref.Name) - return + skillsPulled = len(result.Pulled) + allPulled = append(allPulled, result.Pulled...) + allSkipped = append(allSkipped, result.Skipped...) + maps.Copy(allFailed, result.Failed) + } + + // --- Pull agents --- + if len(agentRefs) > 0 { + builtinAgents := s.builtinAgentTargets() + agentsSource := s.agentsSource() + + var resolved []ssync.LocalAgentInfo + for _, ref := range agentRefs { + target, ok := s.cfg.Targets[ref.TargetName] + if !ok { + writeError(w, http.StatusBadRequest, "unknown target: "+ref.TargetName) + return + } + + agentPath := resolveAgentPath(target, builtinAgents, ref.TargetName) + if agentPath == "" { + writeError(w, http.StatusBadRequest, "no agent path for target: "+ref.TargetName) + return + } + + filePath := filepath.Join(agentPath, ref.Name) + info, err := os.Lstat(filePath) + if err != nil { + writeError(w, http.StatusBadRequest, "agent not found: "+ref.Name+" in "+ref.TargetName) + return + } + if info.Mode()&os.ModeSymlink != 0 { + writeError(w, http.StatusBadRequest, "agent is a symlink (not local): "+ref.Name) + return + } + + resolved = append(resolved, ssync.LocalAgentInfo{ + Name: ref.Name, + Path: filePath, + TargetName: ref.TargetName, + }) } - if !info.IsDir() { - writeError(w, http.StatusBadRequest, "skill is not a directory: "+ref.Name) + + result, err := ssync.PullAgents(resolved, agentsSource, opts) + if err != nil { + writeError(w, http.StatusInternalServerError, "agent collect failed: "+err.Error()) return } - - resolved = append(resolved, ssync.LocalSkillInfo{ - Name: ref.Name, - Path: skillPath, - TargetName: ref.TargetName, - }) - } - - result, err := ssync.PullSkills(resolved, s.cfg.Source, ssync.PullOptions{ - Force: body.Force, - }) - if err != nil { - writeError(w, http.StatusInternalServerError, "collect failed: "+err.Error()) - return + agentsPulled = len(result.Pulled) + allPulled = append(allPulled, result.Pulled...) + allSkipped = append(allSkipped, result.Skipped...) + maps.Copy(allFailed, result.Failed) } // Convert Failed map to string values for JSON - failed := make(map[string]string, len(result.Failed)) - for k, v := range result.Failed { + failed := make(map[string]string, len(allFailed)) + for k, v := range allFailed { failed[k] = v.Error() } status := "ok" msg := "" - if len(result.Failed) > 0 { + if len(allFailed) > 0 { status = "partial" - msg = "some skills failed to collect" + msg = "some items failed to collect" } s.writeOpsLog("collect", status, start, map[string]any{ - "skills_selected": len(body.Skills), - "skills_pulled": len(result.Pulled), - "skills_skipped": len(result.Skipped), - "skills_failed": len(result.Failed), + "skills_selected": len(skillRefs), + "skills_pulled": skillsPulled, + "agents_selected": len(agentRefs), + "agents_pulled": agentsPulled, + "total_pulled": len(allPulled), + "total_skipped": len(allSkipped), + "total_failed": len(allFailed), "force": body.Force, "scope": "ui", }, msg) writeJSON(w, map[string]any{ - "pulled": result.Pulled, - "skipped": result.Skipped, + "pulled": allPulled, + "skipped": allSkipped, "failed": failed, }) } diff --git a/internal/server/handler_collect_test.go b/internal/server/handler_collect_test.go index bdbfbe3f..98237086 100644 --- a/internal/server/handler_collect_test.go +++ b/internal/server/handler_collect_test.go @@ -236,6 +236,125 @@ func TestHandleCollectScan_BothKinds(t *testing.T) { } } +func TestHandleCollect_AgentItems(t *testing.T) { + tgtPath := filepath.Join(t.TempDir(), "claude-skills") + agentPath := filepath.Join(t.TempDir(), "claude-agents") + agentsSource := filepath.Join(t.TempDir(), "agents-source") + s, sourceDir := newTestServerWithTargets(t, map[string]string{"claude": tgtPath}) + + // Write config YAML with agents_source and agent target path. + cfgPath := os.Getenv("SKILLSHARE_CONFIG") + raw := "source: " + sourceDir + "\nagents_source: " + agentsSource + + "\nmode: merge\ntargets:\n claude:\n skills:\n path: " + tgtPath + + "\n agents:\n path: " + agentPath + "\n" + if err := os.WriteFile(cfgPath, []byte(raw), 0644); err != nil { + t.Fatalf("failed to update config: %v", err) + } + os.MkdirAll(agentsSource, 0755) + + // Create a local .md agent file in the agent target directory. + os.MkdirAll(agentPath, 0755) + agentContent := "# helper agent\nThis is a test agent." + os.WriteFile(filepath.Join(agentPath, "helper.md"), []byte(agentContent), 0644) + + body := `{"skills":[{"name":"helper.md","targetName":"claude","kind":"agent"}],"force":false}` + req := httptest.NewRequest(http.MethodPost, "/api/collect", strings.NewReader(body)) + rr := httptest.NewRecorder() + s.handler.ServeHTTP(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("expected 200, got %d: %s", rr.Code, rr.Body.String()) + } + + var resp struct { + Pulled []string `json:"pulled"` + Skipped []string `json:"skipped"` + Failed map[string]string `json:"failed"` + } + if err := json.Unmarshal(rr.Body.Bytes(), &resp); err != nil { + t.Fatalf("failed to decode response: %v", err) + } + + if len(resp.Pulled) != 1 || resp.Pulled[0] != "helper.md" { + t.Fatalf("expected pulled=[helper.md], got %v", resp.Pulled) + } + if len(resp.Failed) != 0 { + t.Fatalf("expected no failures, got %v", resp.Failed) + } + + // Verify the agent file was copied to agents source. + destPath := filepath.Join(agentsSource, "helper.md") + data, err := os.ReadFile(destPath) + if err != nil { + t.Fatalf("agent not found in source dir: %v", err) + } + if string(data) != agentContent { + t.Fatalf("agent content mismatch: got %q, want %q", string(data), agentContent) + } +} + +func TestHandleCollect_MixedSkillsAndAgents(t *testing.T) { + tgtPath := filepath.Join(t.TempDir(), "claude-skills") + agentPath := filepath.Join(t.TempDir(), "claude-agents") + agentsSource := filepath.Join(t.TempDir(), "agents-source") + s, sourceDir := newTestServerWithTargets(t, map[string]string{"claude": tgtPath}) + + // Write config YAML with both skills and agents paths. + cfgPath := os.Getenv("SKILLSHARE_CONFIG") + raw := "source: " + sourceDir + "\nagents_source: " + agentsSource + + "\nmode: merge\ntargets:\n claude:\n skills:\n path: " + tgtPath + + "\n agents:\n path: " + agentPath + "\n" + if err := os.WriteFile(cfgPath, []byte(raw), 0644); err != nil { + t.Fatalf("failed to update config: %v", err) + } + os.MkdirAll(agentsSource, 0755) + + // Create a local skill directory in skill target. + localSkill := filepath.Join(tgtPath, "my-skill") + os.MkdirAll(localSkill, 0755) + os.WriteFile(filepath.Join(localSkill, "SKILL.md"), []byte("local skill"), 0644) + + // Create a local agent file in agent target. + os.MkdirAll(agentPath, 0755) + os.WriteFile(filepath.Join(agentPath, "reviewer.md"), []byte("# reviewer"), 0644) + + body := `{"skills":[` + + `{"name":"my-skill","targetName":"claude"},` + + `{"name":"reviewer.md","targetName":"claude","kind":"agent"}` + + `],"force":false}` + req := httptest.NewRequest(http.MethodPost, "/api/collect", strings.NewReader(body)) + rr := httptest.NewRecorder() + s.handler.ServeHTTP(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("expected 200, got %d: %s", rr.Code, rr.Body.String()) + } + + var resp struct { + Pulled []string `json:"pulled"` + Skipped []string `json:"skipped"` + Failed map[string]string `json:"failed"` + } + if err := json.Unmarshal(rr.Body.Bytes(), &resp); err != nil { + t.Fatalf("failed to decode response: %v", err) + } + + if len(resp.Pulled) != 2 { + t.Fatalf("expected 2 pulled items, got %v", resp.Pulled) + } + if len(resp.Failed) != 0 { + t.Fatalf("expected no failures, got %v", resp.Failed) + } + + // Verify both exist in their respective source dirs. + if _, err := os.Stat(filepath.Join(sourceDir, "my-skill", "SKILL.md")); err != nil { + t.Fatalf("skill not found in source: %v", err) + } + if _, err := os.Stat(filepath.Join(agentsSource, "reviewer.md")); err != nil { + t.Fatalf("agent not found in agents source: %v", err) + } +} + func TestHandleCollectScan_AgentKind_NoSource(t *testing.T) { tgtPath := filepath.Join(t.TempDir(), "claude-skills") s, sourceDir := newTestServerWithTargets(t, map[string]string{"claude": tgtPath}) From bf2aff00590e562ac2246f2bbf82b3b825b821b3 Mon Sep 17 00:00:00 2001 From: Willie Date: Fri, 10 Apr 2026 00:25:53 +0800 Subject: [PATCH 162/205] feat(ui): extend collect API client with kind parameter collectScan now accepts optional kind param for agent scanning. collect method items accept optional kind field. --- ui/src/api/client.ts | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/ui/src/api/client.ts b/ui/src/api/client.ts index 76f361fa..0454d053 100644 --- a/ui/src/api/client.ts +++ b/ui/src/api/client.ts @@ -324,9 +324,14 @@ export const api = { apiFetch(`/resources/${encodeURIComponent(skillName)}/files/${filepath}`), // Collect - collectScan: (target?: string) => - apiFetch(`/collect/scan${target ? '?target=' + encodeURIComponent(target) : ''}`), - collect: (opts: { skills: { name: string; targetName: string }[]; force?: boolean }) => + collectScan: (target?: string, kind?: 'skill' | 'agent') => { + const params = new URLSearchParams(); + if (target) params.set('target', target); + if (kind) params.set('kind', kind); + const qs = params.toString(); + return apiFetch(`/collect/scan${qs ? '?' + qs : ''}`); + }, + collect: (opts: { skills: { name: string; targetName: string; kind?: string }[]; force?: boolean }) => apiFetch('/collect', { method: 'POST', body: JSON.stringify(opts), From 35fcb7475b6d0167c8c9e9caefea7b23c3b06ffe Mon Sep 17 00:00:00 2001 From: Willie Date: Fri, 10 Apr 2026 00:28:39 +0800 Subject: [PATCH 163/205] feat(ui): add SegmentedControl for agent scope on Collect page CollectPage now has Skills/Agents/Both scope selector matching SyncPage pattern. Scan and collect operations pass kind to API. Selected set uses 3-segment key (target/kind/name) to prevent collision between skills and agents with same name. --- ui/src/pages/CollectPage.tsx | 62 ++++++++++++++++++++++++++---------- 1 file changed, 46 insertions(+), 16 deletions(-) diff --git a/ui/src/pages/CollectPage.tsx b/ui/src/pages/CollectPage.tsx index 13019ad6..5039c406 100644 --- a/ui/src/pages/CollectPage.tsx +++ b/ui/src/pages/CollectPage.tsx @@ -28,6 +28,7 @@ import { queryKeys } from '../lib/queryKeys'; import { radius, shadows } from '../design'; import { formatSize } from '../lib/format'; import KindBadge from '../components/KindBadge'; +import SegmentedControl from '../components/SegmentedControl'; type Phase = 'idle' | 'scanning' | 'scanned' | 'collecting' | 'done'; @@ -44,6 +45,23 @@ export default function CollectPage() { const [result, setResult] = useState(null); const [confirming, setConfirming] = useState(false); const { toast } = useToast(); + const [scope, setScope] = useState<'skill' | 'agent' | 'both'>('skill'); + + const scopeLabels = { + skill: { noun: 'skill', nounPlural: 'skills', scanBtn: 'Scan for Local Skills', entity: 'Skills' }, + agent: { noun: 'agent', nounPlural: 'agents', scanBtn: 'Scan for Local Agents', entity: 'Agents' }, + both: { noun: 'resource', nounPlural: 'resources', scanBtn: 'Scan for Local Resources', entity: 'Resources' }, + }; + const labels = scopeLabels[scope]; + + // Reset state when scope changes + useEffect(() => { + setPhase('idle'); + setScanTargets([]); + setTotalCount(0); + setSelected(new Set()); + setResult(null); + }, [scope]); // Auto-scan when target query param is present useEffect(() => { @@ -57,14 +75,14 @@ export default function CollectPage() { setPhase('scanning'); setResult(null); try { - const res = await api.collectScan(targetFilter); + const res = await api.collectScan(targetFilter, scope === 'both' ? undefined : scope); setScanTargets(res.targets); setTotalCount(res.totalCount); // Auto-select all const allKeys = new Set(); for (const t of res.targets) { for (const sk of t.skills) { - allKeys.add(`${t.targetName}/${sk.name}`); + allKeys.add(`${t.targetName}/${sk.kind ?? 'skill'}/${sk.name}`); } } setSelected(allKeys); @@ -79,8 +97,8 @@ export default function CollectPage() { setPhase('collecting'); try { const skills = Array.from(selected).map((key) => { - const [targetName, ...rest] = key.split('/'); - return { name: rest.join('/'), targetName }; + const [targetName, kind, ...rest] = key.split('/'); + return { name: rest.join('/'), targetName, kind }; }); const res = await api.collect({ skills, force }); setResult(res); @@ -114,7 +132,7 @@ export default function CollectPage() { const allKeys = new Set(); for (const t of scanTargets) { for (const sk of t.skills) { - allKeys.add(`${t.targetName}/${sk.name}`); + allKeys.add(`${t.targetName}/${sk.kind ?? 'skill'}/${sk.name}`); } } setSelected(allKeys); @@ -193,6 +211,18 @@ export default function CollectPage() { {/* Scan control area */}
+ + {presetTarget && ( @@ -234,8 +264,8 @@ export default function CollectPage() { {totalCount === 0 ? ( ) : (
@@ -244,7 +274,7 @@ export default function CollectPage() {

- Found {totalCount} local skill{totalCount !== 1 ? 's' : ''} + Found {totalCount} local {totalCount !== 1 ? labels.nounPlural : labels.noun}

)} @@ -312,7 +342,7 @@ export default function CollectPage() {

- Skills collected to source! Run Sync to distribute them to all targets. + {labels.entity} collected to source! Run Sync to distribute them to all targets.

@@ -658,7 +658,7 @@ export default function InstallForm({
- Skill installed with audit warnings + Resource installed with audit warnings
{warningFindings.length} {warningFindings.length === 1 ? 'warning' : 'warnings'}: diff --git a/ui/src/components/SkillPickerModal.tsx b/ui/src/components/SkillPickerModal.tsx index 44444999..0b400e33 100644 --- a/ui/src/components/SkillPickerModal.tsx +++ b/ui/src/components/SkillPickerModal.tsx @@ -83,16 +83,17 @@ export default function SkillPickerModal({ if (items.length > 0) onInstall(items); }; + const allAgents = skills.length > 0 && skills.every((s) => s.kind === 'agent'); + const someAgents = skills.some((s) => s.kind === 'agent'); + const singularLabel = allAgents ? 'agent' : someAgents ? 'resource' : 'skill'; + const pluralLabel = allAgents ? 'agents' : someAgents ? 'resources' : 'skills'; + return (

{singleSelect - ? 'Select a Resource to Install' - : skills.some((s) => s.kind === 'agent') - ? skills.every((s) => s.kind === 'agent') - ? 'Select Agents to Install' - : 'Select Resources to Install' - : 'Select Skills to Install' + ? `Select ${singularLabel[0].toUpperCase() + singularLabel.slice(1)} to Install` + : `Select ${pluralLabel[0].toUpperCase() + pluralLabel.slice(1)} to Install` }

@@ -109,7 +110,7 @@ export default function SkillPickerModal({ /> setFilter(e.target.value)} className="!pl-8 !py-1.5 !text-sm font-mono" @@ -127,7 +128,7 @@ export default function SkillPickerModal({ /> {filter && ( - {filtered.length} of {skills.length} skills + {filtered.length} of {skills.length} {pluralLabel} )}

@@ -137,7 +138,7 @@ export default function SkillPickerModal({ {singleSelect && (
- Custom name is set — select one skill + Custom name is set — select one {singularLabel} {filter && ` (${filtered.length} of ${skills.length})`}
diff --git a/ui/src/components/tour/tourSteps.ts b/ui/src/components/tour/tourSteps.ts index 2d91581a..50b4a8dc 100644 --- a/ui/src/components/tour/tourSteps.ts +++ b/ui/src/components/tour/tourSteps.ts @@ -11,14 +11,14 @@ export interface TourStep { const ALL_STEPS: TourStep[] = [ { id: 'stats-grid', page: '/', targetSelector: "[data-tour='stats-grid']", title: 'Dashboard Overview', description: 'Real-time stats for skills, targets, and sync status. Zeros are normal — numbers update after installing skills.', placement: 'bottom' }, { id: 'quick-actions', page: '/', targetSelector: "[data-tour='quick-actions']", title: 'Quick Actions', description: 'Shortcuts for common operations: one-click sync, security scan, browse skills, batch update.', placement: 'top' }, - { id: 'skills-view', page: '/resources', targetSelector: "[data-tour='skills-view']", title: 'Resource Management', description: 'Browse all installed skills and agents. Switch between Skills and Agents tabs, with grid, folder, and table views.', emptyDescription: 'No resources yet. After the tour, try installing your first skill from Search or Install!', placement: 'bottom' }, + { id: 'skills-view', page: '/resources', targetSelector: "[data-tour='skills-view']", title: 'Resource Management', description: 'Browse all installed skills and agents. Switch between Skills and Agents tabs, with grid, folder, and table views.', emptyDescription: 'No resources yet. After the tour, try installing your first resource from Search or Install!', placement: 'bottom' }, { id: 'extras-list', page: '/extras', targetSelector: "[data-tour='extras-list']", title: 'Extras', description: 'Manage non-skill extra file directories (hooks, snippets, etc.) synced to targets.', emptyDescription: 'No extras yet. After the tour, try adding an extra directory (hooks, snippets, etc.) to sync alongside your skills!', placement: 'bottom' }, { id: 'targets-grid', page: '/targets', targetSelector: "[data-tour='targets-grid']", title: 'Targets', description: 'Your AI CLI tools (Claude, Cursor, etc.). Each target can be configured with its own sync mode.', placement: 'bottom' }, { id: 'skill-filters', page: '/targets', targetSelector: "[data-tour='skill-filters']", title: 'Skill Filters', description: 'Use Include/Exclude patterns to control which skills sync to each target. For example, exclude large skills from lightweight tools.', placement: 'bottom' }, { id: 'search-input', page: '/search', targetSelector: "[data-tour='search-input']", title: 'Search Skills', description: 'Search community-shared skills from GitHub and Hubs. Install directly from results.', placement: 'bottom' }, { id: 'sync-actions', page: '/sync', targetSelector: "[data-tour='sync-actions']", title: 'Sync Operations', description: 'Sync skills from source to all targets. Preview with Diff before executing.', placement: 'bottom' }, { id: 'collect-scan', page: '/collect', targetSelector: "[data-tour='collect-scan']", title: 'Collect Local Skills', description: 'Scan targets for manually created skills and collect them back to source for unified management.', placement: 'bottom' }, - { id: 'install-form', page: '/install', targetSelector: "[data-tour='install-form']", title: 'Install Skills', description: 'Enter a GitHub repo URL to install skills. Track mode (--track) enables future updates.', placement: 'bottom' }, + { id: 'install-form', page: '/install', targetSelector: "[data-tour='install-form']", title: 'Install Resources', description: 'Enter a git repo URL or local path to install skills or agents. Track mode (--track) enables future updates.', placement: 'bottom' }, { id: 'audit-summary', page: '/audit', targetSelector: "[data-tour='audit-summary']", title: 'Security Audit', description: 'Scan all skills for security risks, graded by severity (Critical → Info). Run regularly.', placement: 'bottom' }, { id: 'git-actions', page: '/git', targetSelector: "[data-tour='git-actions']", title: 'Git Sync', description: 'Back up and sync skill configs via Git. Push to upload, Pull to download.', placement: 'bottom' }, { id: 'log-filters', page: '/log', targetSelector: "[data-tour='log-filters']", title: 'Operation Log', description: 'View all operation history. Filter by command type and time range.', placement: 'bottom' }, diff --git a/ui/src/pages/InstallPage.tsx b/ui/src/pages/InstallPage.tsx index b54d7348..68ab09be 100644 --- a/ui/src/pages/InstallPage.tsx +++ b/ui/src/pages/InstallPage.tsx @@ -14,8 +14,8 @@ export default function InstallPage() {
} - title="Install Skill" - subtitle="Install from any git repository or local path" + title="Install Resources" + subtitle="Install skills or agents from any git repository or local path" />
From 7e30699eaa4bf84a4b52c13c73cf1785391b1f79 Mon Sep 17 00:00:00 2001 From: Willie Date: Fri, 10 Apr 2026 02:04:55 +0800 Subject: [PATCH 165/205] feat: add tracked agent repo discovery and install support - DiscoveredResource gains RepoRelPath and Targets fields - AgentKind.Discover populates RepoRelPath via findTrackedRepoRelPath - AgentCheckResult gains RepoPath and dirty status for tracked repos - InferTrackedKind auto-detects pure-agent repos for --track install - TrackedRepoResult reports AgentCount and Agents list --- internal/check/agent_check.go | 49 +++++++++++++++++++++++++---- internal/install/install.go | 34 ++++++++++++++++++++ internal/install/install_tracked.go | 13 +++++++- internal/resource/agent.go | 42 ++++++++++++++++++++----- internal/resource/kind.go | 20 ++++++------ 5 files changed, 134 insertions(+), 24 deletions(-) diff --git a/internal/check/agent_check.go b/internal/check/agent_check.go index 1b530f36..482a388b 100644 --- a/internal/check/agent_check.go +++ b/internal/check/agent_check.go @@ -3,6 +3,7 @@ package check import ( "path/filepath" + "skillshare/internal/git" "skillshare/internal/install" "skillshare/internal/resource" "skillshare/internal/utils" @@ -10,12 +11,13 @@ import ( // AgentCheckResult holds the check result for a single agent. type AgentCheckResult struct { - Name string `json:"name"` - Source string `json:"source,omitempty"` - Version string `json:"version,omitempty"` - RepoURL string `json:"repoUrl,omitempty"` - Status string `json:"status"` // "up_to_date", "drifted", "local", "error", "update_available" - Message string `json:"message,omitempty"` + Name string `json:"name"` + Source string `json:"source,omitempty"` + Version string `json:"version,omitempty"` + RepoURL string `json:"repoUrl,omitempty"` + RepoPath string `json:"repoPath,omitempty"` + Status string `json:"status"` // "up_to_date", "drifted", "dirty", "local", "error", "update_available" + Message string `json:"message,omitempty"` } // CheckAgents scans the agents source directory for installed agents and @@ -43,12 +45,47 @@ func CheckAgents(agentsDir string) []AgentCheckResult { continue } result := checkOneAgent(store, d.SourcePath, d.RelPath) + if d.RepoRelPath != "" { + result = checkTrackedAgentRepo(agentsDir, d) + } results = append(results, result) } return results } +func checkTrackedAgentRepo(agentsDir string, d resource.DiscoveredResource) AgentCheckResult { + result := AgentCheckResult{ + Name: d.RelPath[:len(d.RelPath)-len(".md")], + RepoPath: filepath.Join(agentsDir, filepath.FromSlash(d.RepoRelPath)), + } + + if !install.IsGitRepo(result.RepoPath) { + result.Status = "local" + return result + } + + repoURL, _ := git.GetRemoteURL(result.RepoPath) + version, _ := git.GetCurrentFullHash(result.RepoPath) + result.Source = repoURL + result.RepoURL = repoURL + result.Version = version + + if repoURL == "" || version == "" { + result.Status = "local" + return result + } + + if isDirty, _ := git.IsDirty(result.RepoPath); isDirty { + result.Status = "dirty" + result.Message = "tracked repo has uncommitted changes" + return result + } + + result.Status = "up_to_date" + return result +} + // checkOneAgent checks a single agent file against the centralized metadata store. // sourcePath is the absolute path to the .md file; relPath is relative to the // agents root (e.g. "demo/code-reviewer.md"). diff --git a/internal/install/install.go b/internal/install/install.go index acc2fcbc..bc8ecf49 100644 --- a/internal/install/install.go +++ b/internal/install/install.go @@ -102,6 +102,8 @@ type TrackedRepoResult struct { RepoPath string // Full path to the repo SkillCount int // Number of skills discovered Skills []string // Names of discovered skills + AgentCount int // Number of agents discovered + Agents []string // Names of discovered agents Action string // "cloned", "updated", "skipped" Warnings []string AuditThreshold string @@ -219,6 +221,38 @@ func DiscoverFromGitSubdirWithProgress(source *Source, onProgress ProgressCallba return discoverFromGitSubdirWithProgressImpl(source, onProgress) } +// InferTrackedKind determines which resource kind a tracked install should use. +// Pure-agent repositories resolve to "agent". Mixed repositories must specify +// the kind explicitly to avoid ambiguous install roots. +func InferTrackedKind(source *Source, explicitKind string) (string, error) { + if explicitKind == "skill" || explicitKind == "agent" { + return explicitKind, nil + } + + var ( + discovery *DiscoveryResult + err error + ) + if source.HasSubdir() { + discovery, err = DiscoverFromGitSubdir(source) + } else { + discovery, err = DiscoverFromGit(source) + } + if err != nil { + return "", err + } + defer CleanupDiscovery(discovery) + + switch { + case discovery.HasSkills() && discovery.HasAgents(): + return "", fmt.Errorf("tracked install is ambiguous for mixed repositories; pass --kind skill or --kind agent") + case discovery.HasAgents() && !discovery.HasSkills(): + return "agent", nil + default: + return "skill", nil + } +} + // DiscoverLocal inspects a local directory and discovers skills and agents. func DiscoverLocal(source *Source) (*DiscoveryResult, error) { return discoverLocalImpl(source) diff --git a/internal/install/install_tracked.go b/internal/install/install_tracked.go index 65667d7d..a3f13822 100644 --- a/internal/install/install_tracked.go +++ b/internal/install/install_tracked.go @@ -32,9 +32,12 @@ func installTrackedRepoImpl(source *Source, sourceDir string, opts InstallOption destBase := sourceDir if opts.Into != "" { destBase = filepath.Join(sourceDir, opts.Into) - if err := os.MkdirAll(destBase, 0755); err != nil { + } + if err := os.MkdirAll(destBase, 0755); err != nil { + if opts.Into != "" { return nil, fmt.Errorf("failed to create --into directory: %w", err) } + return nil, fmt.Errorf("failed to create source directory: %w", err) } destPath := filepath.Join(destBase, trackedName) @@ -89,7 +92,11 @@ func installTrackedRepoImpl(source *Source, sourceDir string, opts InstallOption // Also discover agents in the tracked repo agents := discoverAgents(destPath, len(skills) > 0) + result.AgentCount = len(agents) if len(agents) > 0 { + for _, agent := range agents { + result.Agents = append(result.Agents, agent.Name) + } result.Warnings = append(result.Warnings, fmt.Sprintf("%d agent(s) found in tracked repo", len(agents))) } @@ -155,7 +162,11 @@ func updateTrackedRepo(repoPath string, result *TrackedRepoResult, opts InstallO // Also discover agents in the tracked repo agents := discoverAgents(repoPath, len(skills) > 0) + result.AgentCount = len(agents) if len(agents) > 0 { + for _, agent := range agents { + result.Agents = append(result.Agents, agent.Name) + } result.Warnings = append(result.Warnings, fmt.Sprintf("%d agent(s) found in tracked repo", len(agents))) } diff --git a/internal/resource/agent.go b/internal/resource/agent.go index 5dfe5c2d..344acc4a 100644 --- a/internal/resource/agent.go +++ b/internal/resource/agent.go @@ -73,18 +73,23 @@ func (AgentKind) Discover(sourceDir string) ([]DiscoveredResource, error) { disabled := ignoreMatcher.HasRules() && ignoreMatcher.Match(relPath, false) name := agentNameFromFile(path, info.Name()) + targets := utils.ParseFrontmatterList(path, "targets") isNested := strings.Contains(relPath, "/") + repoRelPath := findTrackedRepoRelPath(walkRoot, relPath) resources = append(resources, DiscoveredResource{ - Name: name, - Kind: "agent", - RelPath: relPath, - AbsPath: path, - IsNested: isNested, - Disabled: disabled, - FlatName: AgentFlatName(relPath), - SourcePath: filepath.Join(sourceDir, relPath), + Name: name, + Kind: "agent", + RelPath: relPath, + AbsPath: path, + IsNested: isNested, + IsInRepo: repoRelPath != "", + RepoRelPath: repoRelPath, + Disabled: disabled, + FlatName: AgentFlatName(relPath), + SourcePath: filepath.Join(sourceDir, relPath), + Targets: targets, }) return nil @@ -97,6 +102,27 @@ func (AgentKind) Discover(sourceDir string) ([]DiscoveredResource, error) { return resources, nil } +func findTrackedRepoRelPath(root, relPath string) string { + dir := filepath.Dir(relPath) + if dir == "." || dir == "" { + return "" + } + + parts := strings.Split(filepath.ToSlash(dir), "/") + for i, part := range parts { + if !utils.IsTrackedRepoDir(part) { + continue + } + candidate := strings.Join(parts[:i+1], "/") + gitDir := filepath.Join(root, filepath.FromSlash(candidate), ".git") + if info, err := os.Stat(gitDir); err == nil && info.IsDir() { + return candidate + } + } + + return "" +} + // agentNameFromFile resolves an agent name. Checks frontmatter name field // first, falls back to filename without .md extension. func agentNameFromFile(filePath, fileName string) string { diff --git a/internal/resource/kind.go b/internal/resource/kind.go index 1df12e90..2a3b8816 100644 --- a/internal/resource/kind.go +++ b/internal/resource/kind.go @@ -37,15 +37,17 @@ type ResourceKind interface { // DiscoveredResource represents a resource found during source directory scan. // Used for both skills and agents. type DiscoveredResource struct { - Name string // Canonical name (from frontmatter or filename) - Kind string // "skill" or "agent" - RelPath string // Relative path from source root - AbsPath string // Full absolute path - IsNested bool // Whether this resource is inside a subdirectory - FlatName string // Flattened name for target directories - IsInRepo bool // Whether this resource is inside a tracked repo - Disabled bool // Whether this resource is ignored by ignore file - SourcePath string // Full path preserving caller's logical path (may differ from AbsPath if symlinked) + Name string // Canonical name (from frontmatter or filename) + Kind string // "skill" or "agent" + RelPath string // Relative path from source root + AbsPath string // Full absolute path + IsNested bool // Whether this resource is inside a subdirectory + FlatName string // Flattened name for target directories + IsInRepo bool // Whether this resource is inside a tracked repo + RepoRelPath string // Relative path of the tracked repo root (when IsInRepo) + Disabled bool // Whether this resource is ignored by ignore file + SourcePath string // Full path preserving caller's logical path (may differ from AbsPath if symlinked) + Targets []string // Per-resource target restrictions from frontmatter (nil = all targets) } // ConventionalExcludes are filenames excluded from agent discovery. From b23425aafd213828bb92f9a71316bb219027ba04 Mon Sep 17 00:00:00 2001 From: Willie Date: Fri, 10 Apr 2026 02:05:01 +0800 Subject: [PATCH 166/205] feat(cli): tracked agent repo support in install, check, update, doctor - install: --agent + --track validation, InferTrackedKind dispatch - install_handlers: agent kind routing, renderTrackedAgentRepoMeta - check: enrich tracked agents with remote status (update_available, dirty) - update_agents: batchUpdateAgents with tracked repo git pull - list: agent repo URL fallback via RepoRelPath - doctor: filter-aware agent count, project mode version check --- cmd/skillshare/check.go | 25 +++++++++++ cmd/skillshare/doctor.go | 35 ++++++++++------ cmd/skillshare/doctor_agents.go | 57 +++++++++++++++++++------ cmd/skillshare/install.go | 3 ++ cmd/skillshare/install_handlers.go | 39 ++++++++++++++--- cmd/skillshare/list.go | 6 +++ cmd/skillshare/update_agents.go | 67 ++++++++++++++++++++---------- 7 files changed, 179 insertions(+), 53 deletions(-) diff --git a/cmd/skillshare/check.go b/cmd/skillshare/check.go index f9aff4df..17c2b635 100644 --- a/cmd/skillshare/check.go +++ b/cmd/skillshare/check.go @@ -933,6 +933,27 @@ func renderAgentCheck(agentsDir string, groups []string, jsonMode bool) { agentResults = filtered } + trackedIndices := make([]int, 0, len(agentResults)) + tracked := make([]check.AgentCheckResult, 0, len(agentResults)) + for i := range agentResults { + if agentResults[i].Source != "" { + trackedIndices = append(trackedIndices, i) + tracked = append(tracked, agentResults[i]) + } + } + if len(tracked) > 0 { + if jsonMode { + check.EnrichAgentResultsWithRemote(tracked, nil) + } else { + sp := ui.StartSpinner(fmt.Sprintf("Checking %d tracked agent(s)...", len(tracked))) + check.EnrichAgentResultsWithRemote(tracked, func() { sp.Success("Check complete") }) + fmt.Println() + } + for i, idx := range trackedIndices { + agentResults[idx] = tracked[i] + } + } + if jsonMode { out, _ := json.MarshalIndent(agentResults, "", " ") fmt.Println(string(out)) @@ -948,8 +969,12 @@ func renderAgentCheck(agentsDir string, groups []string, jsonMode bool) { switch r.Status { case "up_to_date": ui.ListItem("success", r.Name, "up to date") + case "update_available": + ui.ListItem("warning", r.Name, "update available") case "drifted": ui.ListItem("warning", r.Name, r.Message) + case "dirty": + ui.ListItem("warning", r.Name, r.Message) case "local": ui.ListItem("info", r.Name, "local agent") case "error": diff --git a/cmd/skillshare/doctor.go b/cmd/skillshare/doctor.go index aa9aa08c..40187b04 100644 --- a/cmd/skillshare/doctor.go +++ b/cmd/skillshare/doctor.go @@ -142,7 +142,7 @@ func cmdDoctorGlobal(jsonMode bool) error { ui.Header("Storage") checkBackupStatus(result, false, backup.BackupDir()) checkTrashStatus(result, trash.TrashDir()) - checkVersionDoctor(cfg, result) + checkVersionDoctor(cfg, result, false) if jsonMode { return finalizeDoctorJSON(restoreUI, result, updateCh) @@ -200,7 +200,7 @@ func cmdDoctorProject(root string, jsonMode bool) error { ui.Header("Storage") checkBackupStatus(result, true, "") checkTrashStatus(result, trash.ProjectTrashDir(root)) - checkVersionDoctor(cfg, result) + checkVersionDoctor(cfg, result, true) if jsonMode { return finalizeDoctorJSON(restoreUI, result, updateCh) @@ -375,10 +375,9 @@ func checkTargets(cfg *config.Config, result *doctorResult, isProject bool) map[ // Prepare agent context for per-target agent checks agentsSource := cfg.EffectiveAgentsSource() agentsExist := dirExists(agentsSource) - var agentCount int + var discoveredAgents []resource.DiscoveredResource if agentsExist { - agents, _ := resource.AgentKind{}.Discover(agentsSource) - agentCount = len(agents) + discoveredAgents, _ = resource.AgentKind{}.Discover(agentsSource) } builtinAgents := config.DefaultAgentTargets() if isProject { @@ -426,7 +425,7 @@ func checkTargets(cfg *config.Config, result *doctorResult, isProject bool) map[ // Agent sub-check for this target if agentsExist { - checkAgentTargetInline(name, target, builtinAgents, agentCount, result) + checkAgentTargetInline(name, target, builtinAgents, discoveredAgents, result) } } @@ -1134,22 +1133,34 @@ func formatBytes(b int64) string { } // checkVersionDoctor checks CLI and skill versions -func checkVersionDoctor(cfg *config.Config, result *doctorResult) { +func checkVersionDoctor(cfg *config.Config, result *doctorResult, isProject bool) { ui.Header("Version") // CLI version ui.Success("CLI: %s", version) result.addCheck("cli_version", checkPass, fmt.Sprintf("CLI: %s", version), nil) - // Skill version (reads metadata.version from SKILL.md) + // Skill version: try SKILL.md frontmatter first, then metadata store localVersion := versioncheck.ReadLocalSkillVersion(cfg.Source) if localVersion == "" { - // Distinguish "file not found" from "version field missing" + // Try metadata store (tracks installed version even without metadata.version in SKILL.md) + store := install.LoadMetadataOrNew(cfg.Source) + if entry := store.Get("skillshare"); entry != nil && entry.Version != "" { + localVersion = strings.TrimPrefix(entry.Version, "v") + } + } + + if localVersion == "" { skillFile := filepath.Join(cfg.Source, "skillshare", "SKILL.md") if _, err := os.Stat(skillFile); os.IsNotExist(err) { - ui.Warning("Skill: not found") - ui.Info(" Run: skillshare upgrade --skill") - result.addCheck("skill_version", checkWarning, "Skill: not found", nil) + if isProject { + ui.Info("Skill: not installed") + result.addCheck("skill_version", checkInfo, "Skill: not installed in project", nil) + } else { + ui.Warning("Skill: not found") + ui.Info(" Run: skillshare upgrade --skill") + result.addCheck("skill_version", checkWarning, "Skill: not found", nil) + } } else { ui.Warning("Skill: missing version") result.addCheck("skill_version", checkWarning, "Skill: missing version", nil) diff --git a/cmd/skillshare/doctor_agents.go b/cmd/skillshare/doctor_agents.go index e7e35df7..956ae718 100644 --- a/cmd/skillshare/doctor_agents.go +++ b/cmd/skillshare/doctor_agents.go @@ -7,61 +7,92 @@ import ( "strings" "skillshare/internal/config" + "skillshare/internal/resource" + "skillshare/internal/sync" "skillshare/internal/ui" ) // checkAgentTargetInline validates the agent target for a single target, // printing as an indented sub-item under the target name in doctor output. -func checkAgentTargetInline(name string, target config.TargetConfig, builtinAgents map[string]config.TargetConfig, agentCount int, result *doctorResult) { +// It applies the target's include/exclude filters to compute the expected count. +func checkAgentTargetInline(name string, target config.TargetConfig, builtinAgents map[string]config.TargetConfig, allAgents []resource.DiscoveredResource, result *doctorResult) { agentPath := resolveAgentTargetPath(target, builtinAgents, name) if agentPath == "" { return } + ac := target.AgentsConfig() + mode := ac.Mode + if mode == "" { + mode = "merge" + } + + // Apply per-target include/exclude filters to get expected agent count + filtered, filterErr := sync.FilterAgents(allAgents, ac.Include, ac.Exclude) + if filterErr != nil { + fmt.Printf(" agents %s[%s] invalid filter: %s%s\n", ui.Red, mode, filterErr.Error(), ui.Reset) + result.addError() + result.addCheck("agent_target_"+name, checkError, + fmt.Sprintf("Agent target %s: invalid filter: %v", name, filterErr), nil) + return + } + agentCount := len(filtered) + + // Build details for JSON output + var details []string + details = append(details, fmt.Sprintf("path: %s", agentPath)) + details = append(details, fmt.Sprintf("mode: %s", mode)) + if len(ac.Include) > 0 { + details = append(details, fmt.Sprintf("include: %s", strings.Join(ac.Include, ", "))) + } + if len(ac.Exclude) > 0 { + details = append(details, fmt.Sprintf("exclude: %s", strings.Join(ac.Exclude, ", "))) + } + info, err := os.Stat(agentPath) if err != nil { if os.IsNotExist(err) { - fmt.Printf(" agents %s[merge] not created%s\n", ui.Gray, ui.Reset) + fmt.Printf(" agents %s[%s] not created%s\n", ui.Gray, mode, ui.Reset) result.addCheck("agent_target_"+name, checkPass, - fmt.Sprintf("Agent target %s: not created yet", name), nil) + fmt.Sprintf("Agent target %s: not created yet", name), details) return } - fmt.Printf(" agents %s[merge] error: %s%s\n", ui.Red, err.Error(), ui.Reset) + fmt.Printf(" agents %s[%s] error: %s%s\n", ui.Red, mode, err.Error(), ui.Reset) result.addError() result.addCheck("agent_target_"+name, checkError, - fmt.Sprintf("Agent target %s: %v", name, err), nil) + fmt.Sprintf("Agent target %s: %v", name, err), details) return } if !info.IsDir() { - fmt.Printf(" agents %s[merge] error: not a directory%s\n", ui.Red, ui.Reset) + fmt.Printf(" agents %s[%s] error: not a directory%s\n", ui.Red, mode, ui.Reset) result.addError() result.addCheck("agent_target_"+name, checkError, - fmt.Sprintf("Agent target %s: path is not a directory", name), nil) + fmt.Sprintf("Agent target %s: path is not a directory", name), details) return } linked, broken := countAgentLinksAndBroken(agentPath) if broken > 0 { - msg := fmt.Sprintf("[merge] %d linked, %d broken", linked, broken) + msg := fmt.Sprintf("[%s] %d linked, %d broken", mode, linked, broken) fmt.Printf(" agents %s%s%s\n", ui.Yellow, msg, ui.Reset) result.addWarning() result.addCheck("agent_target_"+name, checkWarning, - fmt.Sprintf("Agent target %s: %s", name, msg), nil) + fmt.Sprintf("Agent target %s: %s", name, msg), details) return } if linked != agentCount && agentCount > 0 { - fmt.Printf(" agents [merge] %sdrift%s %s(%d/%d linked)%s\n", ui.Yellow, ui.Reset, ui.Dim, linked, agentCount, ui.Reset) + fmt.Printf(" agents [%s] %sdrift%s %s(%d/%d linked)%s\n", mode, ui.Yellow, ui.Reset, ui.Dim, linked, agentCount, ui.Reset) result.addWarning() result.addCheck("agent_target_"+name, checkWarning, - fmt.Sprintf("Agent target %s: drift (%d/%d agents linked)", name, linked, agentCount), nil) + fmt.Sprintf("Agent target %s: drift (%d/%d agents linked)", name, linked, agentCount), details) return } - fmt.Printf(" agents [merge] %smerged%s %s(%d/%d linked)%s\n", ui.Green, ui.Reset, ui.Dim, linked, agentCount, ui.Reset) + fmt.Printf(" agents [%s] %ssynced%s %s(%d/%d linked)%s\n", mode, ui.Green, ui.Reset, ui.Dim, linked, agentCount, ui.Reset) result.addCheck("agent_target_"+name, checkPass, - fmt.Sprintf("Agent target %s: %d agents synced", name, linked), nil) + fmt.Sprintf("Agent target %s: %d agents synced", name, linked), details) } // countAgentLinksAndBroken counts .md symlinks and broken symlinks in a directory. diff --git a/cmd/skillshare/install.go b/cmd/skillshare/install.go index 19b08b65..e9926462 100644 --- a/cmd/skillshare/install.go +++ b/cmd/skillshare/install.go @@ -190,6 +190,9 @@ func parseInstallArgs(args []string) (*installArgs, bool, error) { if result.opts.HasSkillFilter() && result.opts.Track { return nil, false, fmt.Errorf("--skill cannot be used with --track") } + if result.opts.HasAgentFilter() && result.opts.Track { + return nil, false, fmt.Errorf("--agent cannot be used with --track") + } if result.opts.ShouldInstallAll() && result.opts.Track { return nil, false, fmt.Errorf("--all/--yes cannot be used with --track") } diff --git a/cmd/skillshare/install_handlers.go b/cmd/skillshare/install_handlers.go index 498d3664..7a70ff4d 100644 --- a/cmd/skillshare/install_handlers.go +++ b/cmd/skillshare/install_handlers.go @@ -16,6 +16,17 @@ import ( ) func handleTrackedRepoInstall(source *install.Source, cfg *config.Config, opts install.InstallOptions) (installLogSummary, error) { + trackedKind, err := install.InferTrackedKind(source, opts.Kind) + if err != nil { + return installLogSummary{}, err + } + opts.Kind = trackedKind + + trackSourceDir := cfg.Source + if trackedKind == "agent" { + trackSourceDir = cfg.EffectiveAgentsSource() + } + logSummary := installLogSummary{ Source: source.Raw, DryRun: opts.DryRun, @@ -50,7 +61,7 @@ func handleTrackedRepoInstall(source *install.Source, cfg *config.Config, opts i } } - result, err := install.InstallTrackedRepo(source, cfg.Source, opts) + result, err := install.InstallTrackedRepo(source, trackSourceDir, opts) if err != nil { if errors.Is(err, install.ErrSkipSameRepo) { treeSpinner.Warn(firstWarningLine(err.Error())) @@ -72,8 +83,13 @@ func handleTrackedRepoInstall(source *install.Source, cfg *config.Config, opts i fmt.Println() ui.Warning("[dry-run] Would install tracked repo") } else { - ui.StepContinue("Found", fmt.Sprintf("%d skill(s)", result.SkillCount)) - renderTrackedRepoMeta(result.RepoName, result.Skills, result.RepoPath) + if trackedKind == "agent" { + ui.StepContinue("Found", fmt.Sprintf("%d agent(s)", result.AgentCount)) + renderTrackedAgentRepoMeta(result.RepoName, result.Agents, result.RepoPath) + } else { + ui.StepContinue("Found", fmt.Sprintf("%d skill(s)", result.SkillCount)) + renderTrackedRepoMeta(result.RepoName, result.Skills, result.RepoPath) + } } // Display warnings and risk info @@ -93,8 +109,13 @@ func handleTrackedRepoInstall(source *install.Source, cfg *config.Config, opts i // Show next steps if !opts.DryRun { ui.SectionLabel("Next Steps") - ui.Info("Run 'skillshare sync' to distribute skills to all targets") - ui.Info("Run 'skillshare update %s' to update this repo later", result.RepoName) + if trackedKind == "agent" { + ui.Info("Run 'skillshare sync agents' to distribute agents to all targets") + ui.Info("Run 'skillshare update agents --all' to update tracked agent repos later") + } else { + ui.Info("Run 'skillshare sync' to distribute skills to all targets") + ui.Info("Run 'skillshare update %s' to update this repo later", result.RepoName) + } } return logSummary, nil @@ -1018,6 +1039,14 @@ func renderTrackedRepoMeta(repoName string, skills []string, repoPath string) { ui.StepEnd("Location", repoPath) } +func renderTrackedAgentRepoMeta(repoName string, agents []string, repoPath string) { + ui.StepContinue("Tracked", repoName) + if len(agents) > 0 && len(agents) <= 10 { + ui.StepContinue("Agents", strings.Join(agents, ", ")) + } + ui.StepEnd("Location", repoPath) +} + // truncateDesc truncates a description string to max runes, appending " ..." if truncated. func truncateDesc(s string, max int) string { runes := []rune(s) diff --git a/cmd/skillshare/list.go b/cmd/skillshare/list.go index 2c082bcf..8ce1c9f8 100644 --- a/cmd/skillshare/list.go +++ b/cmd/skillshare/list.go @@ -295,6 +295,12 @@ func discoverAndBuildAgentEntries(agentsSource string) []skillEntry { if !entry.InstalledAt.IsZero() { entries[i].InstalledAt = entry.InstalledAt.Format("2006-01-02") } + } else if d.RepoRelPath != "" { + repoPath := filepath.Join(agentsSource, filepath.FromSlash(d.RepoRelPath)) + if repoURL, err := git.GetRemoteURL(repoPath); err == nil { + entries[i].Source = repoURL + entries[i].Type = "git" + } } } return entries diff --git a/cmd/skillshare/update_agents.go b/cmd/skillshare/update_agents.go index a01b7777..f65a861f 100644 --- a/cmd/skillshare/update_agents.go +++ b/cmd/skillshare/update_agents.go @@ -63,12 +63,7 @@ func cmdUpdateAgents(args []string, cfg *config.Config, start time.Time) error { } // Only check agents that have remote sources - var tracked []check.AgentCheckResult - for _, r := range results { - if r.Source != "" { - tracked = append(tracked, r) - } - } + tracked := collectTrackedAgentResults(results) if len(tracked) == 0 { ui.Info("No tracked agents to update (all are local)") @@ -149,10 +144,15 @@ type agentRepoKey struct { // Agents with no RepoURL fall back to per-agent reinstallAgent. func batchUpdateAgents(agentsDir string, agents []check.AgentCheckResult, verbose bool) (updated, failed int) { store := install.LoadMetadataOrNew(agentsDir) + trackedRepos := map[string][]check.AgentCheckResult{} groups := map[agentRepoKey][]check.AgentCheckResult{} var noRepo []check.AgentCheckResult for _, r := range agents { + if r.RepoPath != "" { + trackedRepos[r.RepoPath] = append(trackedRepos[r.RepoPath], r) + continue + } if r.RepoURL == "" { noRepo = append(noRepo, r) continue @@ -179,6 +179,32 @@ func batchUpdateAgents(agentsDir string, agents []check.AgentCheckResult, verbos groups[key] = append(groups[key], r) } + for repoPath, members := range trackedRepos { + uc := &updateContext{ + sourcePath: agentsDir, + opts: &updateOptions{}, + } + ok, _, err := updateTrackedRepoQuick(uc, repoPath) + if err != nil { + for _, m := range members { + if verbose { + ui.Error(" %s: %v", m.Name, err) + } + failed++ + } + continue + } + if !ok { + continue + } + for _, m := range members { + if verbose { + ui.Success(" %s: updated", m.Name) + } + updated++ + } + } + // Batch: one clone per repo group for key, members := range groups { source := &install.Source{ @@ -371,6 +397,16 @@ func parseUpdateAgentArgs(args []string) (*updateAgentArgs, bool, error) { return opts, false, nil } +func collectTrackedAgentResults(results []check.AgentCheckResult) []check.AgentCheckResult { + tracked := make([]check.AgentCheckResult, 0, len(results)) + for _, r := range results { + if r.Source != "" { + tracked = append(tracked, r) + } + } + return tracked +} + func filterAgentCheckResults(results []check.AgentCheckResult, names []string) []check.AgentCheckResult { nameSet := make(map[string]bool, len(names)) for _, n := range names { @@ -530,12 +566,7 @@ func cmdUpdateAgentsProject(args []string, projectRoot string, start time.Time) } } - var tracked []check.AgentCheckResult - for _, r := range results { - if r.Source != "" { - tracked = append(tracked, r) - } - } + tracked := collectTrackedAgentResults(results) if len(tracked) == 0 { ui.Info("No tracked project agents to update (all are local)") @@ -566,17 +597,7 @@ func cmdUpdateAgentsProject(args []string, projectRoot string, start time.Time) return nil } - store := install.LoadMetadataOrNew(agentsDir) - var updated, failed int - for _, r := range updatable { - if err := reinstallAgent(agentsDir, r, store); err != nil { - ui.Error(" %s: %v", r.Name, err) - failed++ - } else { - ui.Success(" %s: updated", r.Name) - updated++ - } - } + updated, failed := batchUpdateAgents(agentsDir, updatable, true) logUpdateAgentOp(config.ProjectConfigPath(projectRoot), len(updatable), updated, failed, opts.dryRun, start) From 40dca838db6711a1a6a2f8d6b372d9d587a65be7 Mon Sep 17 00:00:00 2001 From: Willie Date: Fri, 10 Apr 2026 02:05:06 +0800 Subject: [PATCH 167/205] fix(cli): agent diff drift detection and sync prune when source empty - diff_agents: detect symlink drift, local copy blocking, fix prune count - sync_agents: continue to prune targets when source has no agents instead of returning early, matching skills behavior --- cmd/skillshare/diff_agents.go | 59 +++++++++++++++++++++++++++++------ cmd/skillshare/sync_agents.go | 20 ++++-------- 2 files changed, 56 insertions(+), 23 deletions(-) diff --git a/cmd/skillshare/diff_agents.go b/cmd/skillshare/diff_agents.go index 3a6641e8..8db57c61 100644 --- a/cmd/skillshare/diff_agents.go +++ b/cmd/skillshare/diff_agents.go @@ -2,12 +2,14 @@ package main import ( "os" + "path/filepath" "strings" "time" "skillshare/internal/config" "skillshare/internal/resource" "skillshare/internal/ui" + "skillshare/internal/utils" ) // diffProjectAgents computes agent diffs for project mode. @@ -95,9 +97,6 @@ func diffGlobalAgents(cfg *config.Config, targetName string, opts diffRenderOpts func mergeAgentDiffsGlobal(cfg *config.Config, results []targetDiffResult, targetName string) []targetDiffResult { agentsSource := cfg.EffectiveAgentsSource() agents, _ := resource.AgentKind{}.Discover(agentsSource) - if len(agents) == 0 { - return results - } builtinAgents := config.DefaultAgentTargets() var agentResults []targetDiffResult @@ -128,9 +127,6 @@ func mergeAgentDiffsProject(root string, results []targetDiffResult, targetName agentsSource := rt.agentsSourcePath agents, _ := resource.AgentKind{}.Discover(agentsSource) - if len(agents) == 0 { - return results - } builtinAgents := config.ProjectAgentTargets() var agentResults []targetDiffResult @@ -203,8 +199,9 @@ func computeAgentDiff(targetName, targetDir string, agents []resource.Discovered } // Missing in target (need sync) - for flatName := range expected { - if _, ok := existing[flatName]; !ok { + for flatName, agent := range expected { + fileType, ok := existing[flatName] + if !ok { r.items = append(r.items, copyDiffEntry{ action: "add", name: flatName, @@ -214,13 +211,55 @@ func computeAgentDiff(targetName, targetDir string, agents []resource.Discovered }) r.synced = false r.syncCount++ + continue + } + + targetPath := filepath.Join(targetDir, flatName) + if fileType&os.ModeSymlink != 0 || utils.IsSymlinkOrJunction(targetPath) { + absLink, err := utils.ResolveLinkTarget(targetPath) + if err != nil { + r.items = append(r.items, copyDiffEntry{ + action: "modify", + name: flatName, + kind: "agent", + reason: "link target unreadable", + isSync: true, + }) + r.synced = false + r.syncCount++ + continue + } + absSource, _ := filepath.Abs(agent.AbsPath) + if !utils.PathsEqual(absLink, absSource) { + r.items = append(r.items, copyDiffEntry{ + action: "modify", + name: flatName, + kind: "agent", + reason: "symlink points elsewhere", + isSync: true, + }) + r.synced = false + r.syncCount++ + } + continue } + + r.items = append(r.items, copyDiffEntry{ + action: "modify", + name: flatName, + kind: "agent", + reason: "local copy (sync --force to replace)", + isSync: true, + }) + r.synced = false + r.syncCount++ } // Extra in target (orphans) for name, fileType := range existing { if _, ok := expected[name]; !ok { - if fileType&os.ModeSymlink != 0 { + targetPath := filepath.Join(targetDir, name) + if fileType&os.ModeSymlink != 0 || utils.IsSymlinkOrJunction(targetPath) { r.items = append(r.items, copyDiffEntry{ action: "remove", name: name, @@ -229,6 +268,7 @@ func computeAgentDiff(targetName, targetDir string, agents []resource.Discovered isSync: true, }) r.synced = false + r.syncCount++ } else { r.items = append(r.items, copyDiffEntry{ action: "local", @@ -241,5 +281,6 @@ func computeAgentDiff(targetName, targetDir string, agents []resource.Discovered } } + r.synced = r.syncCount == 0 && r.localCount == 0 return r } diff --git a/cmd/skillshare/sync_agents.go b/cmd/skillshare/sync_agents.go index 3a113877..3f58ce6c 100644 --- a/cmd/skillshare/sync_agents.go +++ b/cmd/skillshare/sync_agents.go @@ -43,15 +43,11 @@ func syncAgentsGlobal(cfg *config.Config, dryRun, force, jsonOutput bool, start } agents := resource.ActiveAgents(allAgents) - if len(agents) == 0 { - if !jsonOutput { - ui.Info("No agents found in %s", agentsSource) - } - return agentSyncStats{}, nil - } - if !jsonOutput { ui.Header("Syncing agents") + if len(agents) == 0 { + ui.Info("No agents found in %s; pruning synced target entries only", agentsSource) + } if dryRun { ui.Warning("Dry run mode - no changes will be made") } @@ -163,15 +159,11 @@ func syncAgentsProject(projectRoot string, dryRun, force, jsonOutput bool, start } agents := resource.ActiveAgents(allAgents) - if len(agents) == 0 { - if !jsonOutput { - ui.Info("No project agents found") - } - return nil - } - if !jsonOutput { ui.Header("Syncing agents (project)") + if len(agents) == 0 { + ui.Info("No project agents found; pruning synced target entries only") + } if dryRun { ui.Warning("Dry run mode - no changes will be made") } From da000c4d07eb2b2516ab4c68918b36f9df076a77 Mon Sep 17 00:00:00 2001 From: Willie Date: Fri, 10 Apr 2026 02:05:13 +0800 Subject: [PATCH 168/205] feat(server): agent tracked repo install, prune-on-empty, audit guards - appendAgentDiffs helper deduplicates diff/stream agent logic - handler_agent_diff: skip local copies, junction-aware checks - handler_sync: prune orphan agents even when source is empty - handler_install: tracked agent repo auto-detection and routing - handler_skills: expose RepoRelPath and Targets for agents - handler_skills_batch: agent target assignment via frontmatter - handler_sync_matrix: pass agent Targets to ClassifySkillForTarget - handler_update: tracked agent repo git pull update - handler_audit: guard empty/missing source dirs gracefully - Tests for all of the above --- internal/server/agent_helpers.go | 23 ++++++ internal/server/handler_agent_diff.go | 20 +++-- internal/server/handler_audit.go | 36 +++++---- internal/server/handler_audit_unit_test.go | 91 ++++++++++++++++++++++ internal/server/handler_diff_stream.go | 19 +---- internal/server/handler_diff_test.go | 64 +++++++++++++++ internal/server/handler_install.go | 36 ++++++--- internal/server/handler_install_test.go | 72 +++++++++++++++++ internal/server/handler_skills.go | 22 ++++++ internal/server/handler_skills_batch.go | 40 +++++++++- internal/server/handler_sync.go | 54 +++++-------- internal/server/handler_sync_matrix.go | 4 +- internal/server/handler_sync_test.go | 59 ++++++++++++++ internal/server/handler_update.go | 7 +- 14 files changed, 465 insertions(+), 82 deletions(-) diff --git a/internal/server/agent_helpers.go b/internal/server/agent_helpers.go index bdc73d22..744c3d30 100644 --- a/internal/server/agent_helpers.go +++ b/internal/server/agent_helpers.go @@ -55,3 +55,26 @@ func mergeAgentDiffItems(diffs []diffTarget, name string, items []diffItem) []di Items: items, }) } + +// appendAgentDiffs merges agent diff items for every agent-capable target. +// Unlike sync-matrix, diff must still inspect targets when the source is empty +// so orphaned synced agents surface as prune drift, matching skills behavior. +func (s *Server) appendAgentDiffs(diffs []diffTarget, targets map[string]config.TargetConfig, agentsSource, filterTarget string) []diffTarget { + agents := discoverActiveAgents(agentsSource) + builtinAgents := s.builtinAgentTargets() + + for name, target := range targets { + if filterTarget != "" && filterTarget != name { + continue + } + agentPath := resolveAgentPath(target, builtinAgents, name) + if agentPath == "" { + continue + } + if items := computeAgentTargetDiff(agentPath, agents); len(items) > 0 { + diffs = mergeAgentDiffItems(diffs, name, items) + } + } + + return diffs +} diff --git a/internal/server/handler_agent_diff.go b/internal/server/handler_agent_diff.go index efbdabe5..671ff94c 100644 --- a/internal/server/handler_agent_diff.go +++ b/internal/server/handler_agent_diff.go @@ -36,7 +36,8 @@ func computeAgentTargetDiff(targetDir string, agents []resource.DiscoveredResour // Missing agents → link for flatName, agent := range expected { - if _, ok := existing[flatName]; !ok { + fileType, ok := existing[flatName] + if !ok { items = append(items, diffItem{ Skill: flatName, Action: "link", @@ -45,9 +46,10 @@ func computeAgentTargetDiff(targetDir string, agents []resource.DiscoveredResour }) continue } + // Exists — check if symlink points to correct source targetPath := filepath.Join(targetDir, flatName) - if utils.IsSymlinkOrJunction(targetPath) { + if fileType&os.ModeSymlink != 0 || utils.IsSymlinkOrJunction(targetPath) { absLink, err := utils.ResolveLinkTarget(targetPath) if err != nil { items = append(items, diffItem{ @@ -67,9 +69,16 @@ func computeAgentTargetDiff(targetDir string, agents []resource.DiscoveredResour Kind: kindAgent, }) } - // else: in sync, no item emitted + continue } - // Non-symlink existing file: already local, no action needed for expected agents + + // Match skills diff semantics: a local file blocks sync unless forced. + items = append(items, diffItem{ + Skill: flatName, + Action: "skip", + Reason: "local copy (sync --force to replace)", + Kind: kindAgent, + }) } // Orphan/local detection @@ -77,7 +86,8 @@ func computeAgentTargetDiff(targetDir string, agents []resource.DiscoveredResour if _, ok := expected[name]; ok { continue } - if fileType&os.ModeSymlink != 0 { + targetPath := filepath.Join(targetDir, name) + if fileType&os.ModeSymlink != 0 || utils.IsSymlinkOrJunction(targetPath) { items = append(items, diffItem{ Skill: name, Action: "prune", diff --git a/internal/server/handler_audit.go b/internal/server/handler_audit.go index 9c7ec090..3a7dcb4f 100644 --- a/internal/server/handler_audit.go +++ b/internal/server/handler_audit.go @@ -10,7 +10,6 @@ import ( "skillshare/internal/audit" "skillshare/internal/resource" "skillshare/internal/sync" - "skillshare/internal/utils" ) type auditFindingResponse struct { @@ -71,6 +70,16 @@ type skillEntry struct { // discoverAuditAgents discovers agents (individual .md files) for audit scanning. func discoverAuditAgents(source string) ([]skillEntry, error) { + if source == "" { + return []skillEntry{}, nil + } + if _, err := os.Stat(source); err != nil { + if os.IsNotExist(err) { + return []skillEntry{}, nil + } + return nil, err + } + discovered, err := resource.AgentKind{}.Discover(source) if err != nil { return nil, err @@ -84,8 +93,21 @@ func discoverAuditAgents(source string) ([]skillEntry, error) { // discoverAuditSkills discovers and deduplicates skills for audit scanning. func discoverAuditSkills(source string) ([]skillEntry, error) { + if source == "" { + return []skillEntry{}, nil + } + if _, err := os.Stat(source); err != nil { + if os.IsNotExist(err) { + return []skillEntry{}, nil + } + return nil, err + } + discovered, err := sync.DiscoverSourceSkills(source) if err != nil { + if os.IsNotExist(err) { + return []skillEntry{}, nil + } return nil, err } @@ -100,18 +122,6 @@ func discoverAuditSkills(source string) ([]skillEntry, error) { skills = append(skills, skillEntry{d.FlatName, d.SourcePath}) } - entries, _ := os.ReadDir(source) - for _, e := range entries { - if !e.IsDir() || utils.IsHidden(e.Name()) { - continue - } - p := filepath.Join(source, e.Name()) - if !seen[p] { - seen[p] = true - skills = append(skills, skillEntry{e.Name(), p}) - } - } - return skills, nil } diff --git a/internal/server/handler_audit_unit_test.go b/internal/server/handler_audit_unit_test.go index 19dca871..fcb6f8e0 100644 --- a/internal/server/handler_audit_unit_test.go +++ b/internal/server/handler_audit_unit_test.go @@ -6,6 +6,7 @@ import ( "net/http/httptest" "os" "path/filepath" + "strings" "testing" ) @@ -61,6 +62,96 @@ func TestHandleAuditAll_WithSkills(t *testing.T) { } } +func TestHandleAuditAll_IgnoresTopLevelDirsWithoutSkillDefinition(t *testing.T) { + s, src := newTestServer(t) + + repoDir := filepath.Join(src, "_vijaythecoder-awesome-claude-agents") + if err := os.MkdirAll(filepath.Join(repoDir, "agents", "core"), 0o755); err != nil { + t.Fatalf("failed to create repo dir: %v", err) + } + if err := os.WriteFile(filepath.Join(repoDir, "agents", "core", "code-reviewer.md"), []byte("# code reviewer"), 0o644); err != nil { + t.Fatalf("failed to write nested agent file: %v", err) + } + + req := httptest.NewRequest(http.MethodGet, "/api/audit", nil) + rr := httptest.NewRecorder() + s.handler.ServeHTTP(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("expected 200, got %d: %s", rr.Code, rr.Body.String()) + } + + var resp struct { + Results []any `json:"results"` + Summary struct { + Total int `json:"total"` + } `json:"summary"` + } + if err := json.Unmarshal(rr.Body.Bytes(), &resp); err != nil { + t.Fatalf("failed to decode response: %v", err) + } + if len(resp.Results) != 0 { + t.Fatalf("expected 0 audited results, got %d", len(resp.Results)) + } + if resp.Summary.Total != 0 { + t.Fatalf("expected summary total 0, got %d", resp.Summary.Total) + } +} + +func TestHandleAuditAll_AgentsMissingSourceReturnsEmpty(t *testing.T) { + s, _ := newTestServer(t) + + req := httptest.NewRequest(http.MethodGet, "/api/audit?kind=agents", nil) + rr := httptest.NewRecorder() + s.handler.ServeHTTP(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("expected 200, got %d: %s", rr.Code, rr.Body.String()) + } + + var resp struct { + Results []any `json:"results"` + Summary struct { + Total int `json:"total"` + } `json:"summary"` + } + if err := json.Unmarshal(rr.Body.Bytes(), &resp); err != nil { + t.Fatalf("failed to decode response: %v", err) + } + if len(resp.Results) != 0 { + t.Fatalf("expected 0 agent audit results, got %d", len(resp.Results)) + } + if resp.Summary.Total != 0 { + t.Fatalf("expected summary total 0, got %d", resp.Summary.Total) + } +} + +func TestHandleAuditStream_AgentsMissingSourceReturnsEmpty(t *testing.T) { + s, _ := newTestServer(t) + + req := httptest.NewRequest(http.MethodGet, "/api/audit/stream?kind=agents", nil) + rr := httptest.NewRecorder() + s.handler.ServeHTTP(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("expected 200, got %d: %s", rr.Code, rr.Body.String()) + } + + body := rr.Body.String() + if strings.Contains(body, "event: error") { + t.Fatalf("expected no error event, got: %s", body) + } + if !strings.Contains(body, "event: start") { + t.Fatalf("expected start event, got: %s", body) + } + if !strings.Contains(body, "\"total\":0") { + t.Fatalf("expected empty start payload, got: %s", body) + } + if !strings.Contains(body, "event: done") { + t.Fatalf("expected done event, got: %s", body) + } +} + func TestHandleAuditAll_IncludesCrossSkillResult(t *testing.T) { s, src := newTestServer(t) diff --git a/internal/server/handler_diff_stream.go b/internal/server/handler_diff_stream.go index 3b5da473..0d24c5d3 100644 --- a/internal/server/handler_diff_stream.go +++ b/internal/server/handler_diff_stream.go @@ -67,24 +67,7 @@ func (s *Server) handleDiffStream(w http.ResponseWriter, r *http.Request) { }) } - // Agent diffs - if agents := discoverActiveAgents(agentsSource); len(agents) > 0 { - builtinAgents := s.builtinAgentTargets() - for name, target := range targets { - select { - case <-ctx.Done(): - return - default: - } - agentPath := resolveAgentPath(target, builtinAgents, name) - if agentPath == "" { - continue - } - if items := computeAgentTargetDiff(agentPath, agents); len(items) > 0 { - diffs = mergeAgentDiffItems(diffs, name, items) - } - } - } + diffs = s.appendAgentDiffs(diffs, targets, agentsSource, "") donePayload := map[string]any{"diffs": diffs} maps.Copy(donePayload, ignorePayload(ignoreStats)) diff --git a/internal/server/handler_diff_test.go b/internal/server/handler_diff_test.go index c410ecc8..068308ae 100644 --- a/internal/server/handler_diff_test.go +++ b/internal/server/handler_diff_test.go @@ -4,8 +4,11 @@ import ( "encoding/json" "net/http" "net/http/httptest" + "os" "path/filepath" "testing" + + "skillshare/internal/config" ) func TestHandleDiff_Empty(t *testing.T) { @@ -48,3 +51,64 @@ func TestHandleDiff_WithTarget(t *testing.T) { t.Fatalf("expected 1 diff target, got %d", len(resp.Diffs)) } } + +func TestHandleDiff_AgentPruneWhenSourceEmpty(t *testing.T) { + s, _ := newTestServer(t) + + agentSource := filepath.Join(t.TempDir(), "agents") + agentTarget := filepath.Join(t.TempDir(), "claude-agents") + if err := os.MkdirAll(agentSource, 0o755); err != nil { + t.Fatalf("mkdir agent source: %v", err) + } + if err := os.MkdirAll(agentTarget, 0o755); err != nil { + t.Fatalf("mkdir agent target: %v", err) + } + if err := os.Symlink(filepath.Join(agentSource, "tutor.md"), filepath.Join(agentTarget, "tutor.md")); err != nil { + t.Fatalf("seed orphan agent symlink: %v", err) + } + + s.cfg.AgentsSource = agentSource + s.cfg.Targets["claude"] = config.TargetConfig{ + Skills: &config.ResourceTargetConfig{Path: filepath.Join(t.TempDir(), "claude-skills")}, + Agents: &config.ResourceTargetConfig{Path: agentTarget}, + } + if err := s.cfg.Save(); err != nil { + t.Fatalf("save config: %v", err) + } + + req := httptest.NewRequest(http.MethodGet, "/api/diff", nil) + rr := httptest.NewRecorder() + s.handler.ServeHTTP(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("expected 200, got %d: %s", rr.Code, rr.Body.String()) + } + + var resp struct { + Diffs []struct { + Target string `json:"target"` + Items []struct { + Skill string `json:"skill"` + Action string `json:"action"` + Kind string `json:"kind"` + } `json:"items"` + } `json:"diffs"` + } + if err := json.Unmarshal(rr.Body.Bytes(), &resp); err != nil { + t.Fatalf("unmarshal diff response: %v", err) + } + + if len(resp.Diffs) != 1 { + t.Fatalf("expected 1 diff target, got %d", len(resp.Diffs)) + } + if resp.Diffs[0].Target != "claude" { + t.Fatalf("expected claude target, got %q", resp.Diffs[0].Target) + } + if len(resp.Diffs[0].Items) != 1 { + t.Fatalf("expected 1 diff item, got %d", len(resp.Diffs[0].Items)) + } + item := resp.Diffs[0].Items[0] + if item.Skill != "tutor.md" || item.Action != "prune" || item.Kind != "agent" { + t.Fatalf("unexpected diff item: %+v", item) + } +} diff --git a/internal/server/handler_install.go b/internal/server/handler_install.go index aaa4a480..5508a6ca 100644 --- a/internal/server/handler_install.go +++ b/internal/server/handler_install.go @@ -323,20 +323,33 @@ func (s *Server) handleInstall(w http.ResponseWriter, r *http.Request) { // Tracked repo install if body.Track { + trackedKind, err := install.InferTrackedKind(source, body.Kind) + if err != nil { + writeError(w, http.StatusBadRequest, err.Error()) + return + } + + trackSourceDir := s.cfg.Source + if trackedKind == "agent" { + trackSourceDir = s.agentsSource() + } + installOpts := install.InstallOptions{ Name: body.Name, + Kind: trackedKind, Force: body.Force, SkipAudit: body.SkipAudit, Into: body.Into, Branch: body.Branch, AuditThreshold: s.auditThreshold(), - SourceDir: s.cfg.Source, + SourceDir: trackSourceDir, } if s.IsProjectMode() { installOpts.AuditProjectRoot = s.projectRoot } - result, err := install.InstallTrackedRepo(source, s.cfg.Source, install.InstallOptions{ + result, err := install.InstallTrackedRepo(source, trackSourceDir, install.InstallOptions{ Name: installOpts.Name, + Kind: installOpts.Kind, Force: installOpts.Force, SkipAudit: installOpts.SkipAudit, Into: installOpts.Into, @@ -358,13 +371,15 @@ func (s *Server) handleInstall(w http.ResponseWriter, r *http.Request) { return } // Reconcile config after tracked repo install - if s.IsProjectMode() { - if rErr := config.ReconcileProjectSkills(s.projectRoot, s.projectCfg, s.skillsStore, s.cfg.Source); rErr != nil { - log.Printf("warning: failed to reconcile project skills config: %v", rErr) - } - } else { - if rErr := config.ReconcileGlobalSkills(s.cfg, s.skillsStore); rErr != nil { - log.Printf("warning: failed to reconcile global skills config: %v", rErr) + if trackedKind == "skill" { + if s.IsProjectMode() { + if rErr := config.ReconcileProjectSkills(s.projectRoot, s.projectCfg, s.skillsStore, s.cfg.Source); rErr != nil { + log.Printf("warning: failed to reconcile project skills config: %v", rErr) + } + } else { + if rErr := config.ReconcileGlobalSkills(s.cfg, s.skillsStore); rErr != nil { + log.Printf("warning: failed to reconcile global skills config: %v", rErr) + } } } @@ -372,6 +387,7 @@ func (s *Server) handleInstall(w http.ResponseWriter, r *http.Request) { "source": body.Source, "mode": s.installLogMode(), "tracked": true, + "kind": trackedKind, "force": body.Force, "threshold": s.auditThreshold(), "scope": "ui", @@ -391,7 +407,9 @@ func (s *Server) handleInstall(w http.ResponseWriter, r *http.Request) { writeJSON(w, map[string]any{ "repoName": result.RepoName, "skillCount": result.SkillCount, + "agentCount": result.AgentCount, "skills": result.Skills, + "agents": result.Agents, "action": result.Action, "warnings": result.Warnings, }) diff --git a/internal/server/handler_install_test.go b/internal/server/handler_install_test.go index 871bd7a4..a1b5bc02 100644 --- a/internal/server/handler_install_test.go +++ b/internal/server/handler_install_test.go @@ -13,6 +13,14 @@ import ( "skillshare/internal/install" ) +func entryNames(entries []os.DirEntry) []string { + names := make([]string, 0, len(entries)) + for _, entry := range entries { + names = append(names, entry.Name()) + } + return names +} + func TestHandleInstallBatch_AgentInstallWritesMetadataToAgentsSource(t *testing.T) { s, skillsDir := newTestServer(t) @@ -167,3 +175,67 @@ func TestHandleInstallBatch_LocalAgentInstallPreservesNestedPath(t *testing.T) { t.Fatal("expected nested agent metadata loaded into agentsStore") } } + +func TestHandleInstall_TrackPureAgentRepoInstallsIntoAgentsSource(t *testing.T) { + s, skillsDir := newTestServer(t) + + agentsDir := filepath.Join(t.TempDir(), "agents") + if err := os.MkdirAll(agentsDir, 0o755); err != nil { + t.Fatalf("failed to create agents dir: %v", err) + } + s.cfg.AgentsSource = agentsDir + s.agentsStore = install.NewMetadataStore() + + repoDir := t.TempDir() + initGitRepo(t, repoDir) + + agentPath := filepath.Join(repoDir, "reviewer.md") + if err := os.WriteFile(agentPath, []byte("# Reviewer v1"), 0o644); err != nil { + t.Fatalf("failed to write agent file: %v", err) + } + for _, args := range [][]string{ + {"add", "reviewer.md"}, + {"commit", "-m", "add reviewer agent"}, + } { + cmd := exec.Command("git", args...) + cmd.Dir = repoDir + if out, err := cmd.CombinedOutput(); err != nil { + t.Fatalf("git %v failed: %s %v", args, out, err) + } + } + + payload, err := json.Marshal(map[string]any{ + "source": "file://" + repoDir, + "track": true, + }) + if err != nil { + t.Fatalf("failed to marshal payload: %v", err) + } + + req := httptest.NewRequest(http.MethodPost, "/api/install", bytes.NewReader(payload)) + rr := httptest.NewRecorder() + s.mux.ServeHTTP(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("unexpected status: got %d, body=%s", rr.Code, rr.Body.String()) + } + + var resp struct { + RepoName string `json:"repoName"` + } + if err := json.Unmarshal(rr.Body.Bytes(), &resp); err != nil { + t.Fatalf("failed to decode response: %v", err) + } + trackedRepoName := resp.RepoName + if _, err := os.Stat(filepath.Join(agentsDir, trackedRepoName, ".git")); err != nil { + agentEntries, _ := os.ReadDir(agentsDir) + skillEntries, _ := os.ReadDir(skillsDir) + t.Fatalf("expected tracked agent repo in agents source: %v (agents=%v skills=%v body=%s)", err, entryNames(agentEntries), entryNames(skillEntries), rr.Body.String()) + } + if _, err := os.Stat(filepath.Join(agentsDir, trackedRepoName, "reviewer.md")); err != nil { + t.Fatalf("expected tracked agent file in agents source: %v", err) + } + if _, err := os.Stat(filepath.Join(skillsDir, trackedRepoName)); !os.IsNotExist(err) { + t.Fatalf("expected no tracked agent repo in skills source, got err=%v", err) + } +} diff --git a/internal/server/handler_skills.go b/internal/server/handler_skills.go index bf1e205a..2ca95a65 100644 --- a/internal/server/handler_skills.go +++ b/internal/server/handler_skills.go @@ -101,7 +101,9 @@ func (s *Server) handleListSkills(w http.ResponseWriter, r *http.Request) { FlatName: d.FlatName, RelPath: d.RelPath, SourcePath: d.SourcePath, + IsInRepo: d.IsInRepo, Disabled: d.Disabled, + Targets: d.Targets, } // Read from centralized agents metadata store @@ -114,6 +116,15 @@ func (s *Server) handleListSkills(w http.ResponseWriter, r *http.Request) { item.Type = entry.Type item.RepoURL = entry.RepoURL item.Version = entry.Version + } else if d.RepoRelPath != "" { + repoPath := filepath.Join(agentsSource, filepath.FromSlash(d.RepoRelPath)) + if repoURL, err := git.GetRemoteURL(repoPath); err == nil { + item.Source = repoURL + item.RepoURL = repoURL + } + if version, err := git.GetCurrentFullHash(repoPath); err == nil { + item.Version = version + } } items = append(items, item) @@ -227,7 +238,9 @@ func (s *Server) handleGetSkill(w http.ResponseWriter, r *http.Request) { FlatName: d.FlatName, RelPath: d.RelPath, SourcePath: d.SourcePath, + IsInRepo: d.IsInRepo, Disabled: d.Disabled, + Targets: d.Targets, } agentKey := strings.TrimSuffix(d.RelPath, ".md") @@ -239,6 +252,15 @@ func (s *Server) handleGetSkill(w http.ResponseWriter, r *http.Request) { item.Type = entry.Type item.RepoURL = entry.RepoURL item.Version = entry.Version + } else if d.RepoRelPath != "" { + repoPath := filepath.Join(agentsSource, filepath.FromSlash(d.RepoRelPath)) + if repoURL, err := git.GetRemoteURL(repoPath); err == nil { + item.Source = repoURL + item.RepoURL = repoURL + } + if version, err := git.GetCurrentFullHash(repoPath); err == nil { + item.Version = version + } } writeJSON(w, map[string]any{ diff --git a/internal/server/handler_skills_batch.go b/internal/server/handler_skills_batch.go index a204b441..af0656cc 100644 --- a/internal/server/handler_skills_batch.go +++ b/internal/server/handler_skills_batch.go @@ -7,6 +7,7 @@ import ( "strings" "time" + "skillshare/internal/resource" ssync "skillshare/internal/sync" "skillshare/internal/utils" ) @@ -204,5 +205,42 @@ func (s *Server) handleSetSkillTargets(w http.ResponseWriter, r *http.Request) { return } - writeError(w, http.StatusNotFound, "skill not found: "+name) + // Try agents if skill not found + s.mu.RLock() + agentsSource := s.agentsSource() + s.mu.RUnlock() + + if agentsSource != "" { + agents, _ := resource.AgentKind{}.Discover(agentsSource) + for _, d := range agents { + if d.FlatName != name { + continue + } + + var values []string + if req.Target != "" { + values = []string{req.Target} + } + + s.mu.Lock() + err := utils.SetFrontmatterList(d.SourcePath, "targets", values) + s.mu.Unlock() + + if err != nil { + writeError(w, http.StatusInternalServerError, "failed to update agent: "+err.Error()) + return + } + + s.writeOpsLog("set-agent-targets", "ok", start, map[string]any{ + "name": name, + "target": req.Target, + "scope": "ui", + }, "") + + writeJSON(w, map[string]any{"success": true}) + return + } + } + + writeError(w, http.StatusNotFound, "resource not found: "+name) } diff --git a/internal/server/handler_sync.go b/internal/server/handler_sync.go index c7f63ff8..e6da94fc 100644 --- a/internal/server/handler_sync.go +++ b/internal/server/handler_sync.go @@ -4,6 +4,7 @@ import ( "encoding/json" "maps" "net/http" + "os" "time" "skillshare/internal/config" @@ -175,7 +176,8 @@ func (s *Server) handleSync(w http.ResponseWriter, r *http.Request) { // Agent sync (skip when kind == "skill") if body.Kind != kindSkill { agentsSource := s.agentsSource() - if agents := discoverActiveAgents(agentsSource); len(agents) > 0 { + if info, err := os.Stat(agentsSource); err == nil && info.IsDir() { + agents := discoverActiveAgents(agentsSource) builtinAgents := s.builtinAgentTargets() for name, target := range s.cfg.Targets { @@ -195,6 +197,15 @@ func (s *Server) handleSync(w http.ResponseWriter, r *http.Request) { continue } + // Prune orphan agents even when the source is empty so uninstall-all + // matches skills and clears previously synced target entries. + var pruned []string + if agentMode == "merge" { + pruned, _ = ssync.PruneOrphanAgentLinks(agentPath, agents, body.DryRun) + } else if agentMode == "copy" { + pruned, _ = ssync.PruneOrphanAgentCopies(agentPath, agents, body.DryRun) + } + // Find or create result entry for this target idx := -1 for i := range results { @@ -203,29 +214,21 @@ func (s *Server) handleSync(w http.ResponseWriter, r *http.Request) { break } } - if idx >= 0 { - results[idx].Linked = append(results[idx].Linked, agentResult.Linked...) - results[idx].Updated = append(results[idx].Updated, agentResult.Updated...) - results[idx].Skipped = append(results[idx].Skipped, agentResult.Skipped...) - } else if len(agentResult.Linked) > 0 || len(agentResult.Updated) > 0 || len(agentResult.Skipped) > 0 { + if idx < 0 && (len(agentResult.Linked) > 0 || len(agentResult.Updated) > 0 || len(agentResult.Skipped) > 0 || len(pruned) > 0) { results = append(results, syncTargetResult{ Target: name, - Linked: agentResult.Linked, - Updated: agentResult.Updated, - Skipped: agentResult.Skipped, + Linked: make([]string, 0), + Updated: make([]string, 0), + Skipped: make([]string, 0), Pruned: make([]string, 0), }) idx = len(results) - 1 } - // Prune orphan agents — reuse idx to avoid re-scanning - var pruned []string - if agentMode == "merge" { - pruned, _ = ssync.PruneOrphanAgentLinks(agentPath, agents, body.DryRun) - } else if agentMode == "copy" { - pruned, _ = ssync.PruneOrphanAgentCopies(agentPath, agents, body.DryRun) - } - if idx >= 0 && len(pruned) > 0 { + if idx >= 0 { + results[idx].Linked = append(results[idx].Linked, agentResult.Linked...) + results[idx].Updated = append(results[idx].Updated, agentResult.Updated...) + results[idx].Skipped = append(results[idx].Skipped, agentResult.Skipped...) results[idx].Pruned = append(results[idx].Pruned, pruned...) } } @@ -293,22 +296,7 @@ func (s *Server) handleDiff(w http.ResponseWriter, r *http.Request) { diffs = append(diffs, s.computeTargetDiff(name, target, discovered, globalMode, source)) } - // Agent diffs - if agents := discoverActiveAgents(agentsSource); len(agents) > 0 { - builtinAgents := s.builtinAgentTargets() - for name, target := range targets { - if filterTarget != "" && filterTarget != name { - continue - } - agentPath := resolveAgentPath(target, builtinAgents, name) - if agentPath == "" { - continue - } - if items := computeAgentTargetDiff(agentPath, agents); len(items) > 0 { - diffs = mergeAgentDiffItems(diffs, name, items) - } - } - } + diffs = s.appendAgentDiffs(diffs, targets, agentsSource, filterTarget) resp := map[string]any{"diffs": diffs} maps.Copy(resp, ignorePayload(ignoreStats)) diff --git a/internal/server/handler_sync_matrix.go b/internal/server/handler_sync_matrix.go index 769ec44e..4496083b 100644 --- a/internal/server/handler_sync_matrix.go +++ b/internal/server/handler_sync_matrix.go @@ -99,7 +99,7 @@ func (s *Server) handleSyncMatrix(w http.ResponseWriter, r *http.Request) { } } else { for _, agent := range agents { - status, reason := ssync.ClassifySkillForTarget(agent.FlatName, nil, name, ac.Include, ac.Exclude) + status, reason := ssync.ClassifySkillForTarget(agent.FlatName, agent.Targets, name, ac.Include, ac.Exclude) entries = append(entries, syncMatrixEntry{ Skill: agent.FlatName, Target: name, @@ -209,7 +209,7 @@ func (s *Server) handleSyncMatrixPreview(w http.ResponseWriter, r *http.Request) } } else { for _, agent := range agents { - status, reason := ssync.ClassifySkillForTarget(agent.FlatName, nil, body.Target, body.AgentInclude, body.AgentExclude) + status, reason := ssync.ClassifySkillForTarget(agent.FlatName, agent.Targets, body.Target, body.AgentInclude, body.AgentExclude) entries = append(entries, syncMatrixEntry{ Skill: agent.FlatName, Target: body.Target, diff --git a/internal/server/handler_sync_test.go b/internal/server/handler_sync_test.go index 237b82c1..db32f015 100644 --- a/internal/server/handler_sync_test.go +++ b/internal/server/handler_sync_test.go @@ -9,6 +9,7 @@ import ( "strings" "testing" + "skillshare/internal/config" "skillshare/internal/install" ) @@ -99,3 +100,61 @@ func TestHandleSync_NoTargets(t *testing.T) { t.Errorf("expected 0 results for no targets, got %d", len(resp.Results)) } } + +func TestHandleSync_AgentPrunesOrphanWhenSourceEmpty(t *testing.T) { + s, _ := newTestServer(t) + + agentSource := filepath.Join(t.TempDir(), "agents") + agentTarget := filepath.Join(t.TempDir(), "claude-agents") + if err := os.MkdirAll(agentSource, 0o755); err != nil { + t.Fatalf("mkdir agent source: %v", err) + } + if err := os.MkdirAll(agentTarget, 0o755); err != nil { + t.Fatalf("mkdir agent target: %v", err) + } + orphanPath := filepath.Join(agentTarget, "tutor.md") + if err := os.Symlink(filepath.Join(agentSource, "tutor.md"), orphanPath); err != nil { + t.Fatalf("seed orphan agent symlink: %v", err) + } + + s.cfg.AgentsSource = agentSource + s.cfg.Targets["claude"] = config.TargetConfig{ + Skills: &config.ResourceTargetConfig{Path: filepath.Join(t.TempDir(), "claude-skills")}, + Agents: &config.ResourceTargetConfig{Path: agentTarget}, + } + if err := s.cfg.Save(); err != nil { + t.Fatalf("save config: %v", err) + } + + req := httptest.NewRequest(http.MethodPost, "/api/sync", strings.NewReader(`{"kind":"agent"}`)) + rr := httptest.NewRecorder() + s.handler.ServeHTTP(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("expected 200, got %d: %s", rr.Code, rr.Body.String()) + } + + if _, err := os.Lstat(orphanPath); !os.IsNotExist(err) { + t.Fatalf("expected orphan agent symlink to be pruned, got err=%v", err) + } + + var resp struct { + Results []struct { + Target string `json:"target"` + Pruned []string `json:"pruned"` + } `json:"results"` + } + if err := json.Unmarshal(rr.Body.Bytes(), &resp); err != nil { + t.Fatalf("unmarshal sync response: %v", err) + } + + if len(resp.Results) != 1 { + t.Fatalf("expected 1 sync result, got %d", len(resp.Results)) + } + if resp.Results[0].Target != "claude" { + t.Fatalf("expected claude target, got %q", resp.Results[0].Target) + } + if len(resp.Results[0].Pruned) != 1 || resp.Results[0].Pruned[0] != "tutor.md" { + t.Fatalf("expected pruned tutor.md, got %+v", resp.Results[0].Pruned) + } +} diff --git a/internal/server/handler_update.go b/internal/server/handler_update.go index 5597a9be..971eade9 100644 --- a/internal/server/handler_update.go +++ b/internal/server/handler_update.go @@ -159,7 +159,7 @@ func (s *Server) updateSingleByKind(name, kind string, force, skipAudit bool) up } } -func (s *Server) updateAgent(name string, _ bool, _ bool) updateResultItem { +func (s *Server) updateAgent(name string, force, skipAudit bool) updateResultItem { agentsSource := s.agentsSource() if agentsSource == "" { return updateResultItem{Name: name, Kind: "agent", Action: "error", Message: "agents source is not configured"} @@ -170,6 +170,11 @@ func (s *Server) updateAgent(name string, _ bool, _ bool) updateResultItem { return updateResultItem{Name: name, Kind: "agent", Action: "error", Message: err.Error()} } + if localAgent.RepoRelPath != "" { + repoPath := filepath.Join(agentsSource, filepath.FromSlash(localAgent.RepoRelPath)) + return s.updateTrackedRepo(agentMetaKey(localAgent.RelPath), repoPath, force, skipAudit) + } + metaKey := agentMetaKey(localAgent.RelPath) entry := s.agentsStore.GetByPath(metaKey) if entry == nil || entry.Source == "" { From 5c99a1cb9f12f654103b36dc463b17a9e9f14abe Mon Sep 17 00:00:00 2001 From: Willie Date: Fri, 10 Apr 2026 02:05:23 +0800 Subject: [PATCH 169/205] feat(ui): audit cache invalidation, agent target display, UX polish - auditCache: centralized cache with installed-count staleness check - resourceNames: agent display name formatting (strip .md suffix) - AuditPage: skip audit when 0 installed, show empty state per kind - clearAuditCache on install/uninstall/update across all pages - ResourcesPage: agent target summary, context menu for agents - FilterStudioPage: agent display name in preview search - SkillPickerModal: accessible radio button (button element) - fieldDocs: agents sub-keys no longer marked reserved - Integration tests: diff/sync prune after uninstall-all, tracked agent install --- tests/integration/agent_coverage_gaps_test.go | 85 +++++++++++++++++++ tests/integration/install_agent_test.go | 70 +++++++++++++++ ui/src/components/InstallForm.tsx | 2 + ui/src/components/SkillPickerModal.tsx | 19 +++-- ui/src/lib/auditCache.test.ts | 37 ++++++++ ui/src/lib/auditCache.ts | 35 ++++++++ ui/src/lib/fieldDocs.ts | 24 ++++-- ui/src/lib/resourceNames.test.ts | 24 ++++++ ui/src/lib/resourceNames.ts | 11 +++ ui/src/pages/AuditPage.tsx | 48 +++++++---- ui/src/pages/BatchUninstallPage.tsx | 2 + ui/src/pages/DashboardPage.tsx | 2 + ui/src/pages/DoctorPage.tsx | 1 + ui/src/pages/FilterStudioPage.tsx | 11 ++- ui/src/pages/ResourceDetailPage.tsx | 3 + ui/src/pages/ResourcesPage.tsx | 57 +++++++++---- ui/src/pages/SearchPage.tsx | 5 ++ ui/src/pages/UpdatePage.tsx | 2 + 18 files changed, 387 insertions(+), 51 deletions(-) create mode 100644 ui/src/lib/auditCache.test.ts create mode 100644 ui/src/lib/auditCache.ts create mode 100644 ui/src/lib/resourceNames.test.ts create mode 100644 ui/src/lib/resourceNames.ts diff --git a/tests/integration/agent_coverage_gaps_test.go b/tests/integration/agent_coverage_gaps_test.go index d1666fb7..e4e90a34 100644 --- a/tests/integration/agent_coverage_gaps_test.go +++ b/tests/integration/agent_coverage_gaps_test.go @@ -3,6 +3,7 @@ package integration import ( + "encoding/json" "os" "path/filepath" "testing" @@ -144,6 +145,90 @@ targets: } } +func TestDiff_Default_ShowsAgentPruneAfterUninstallAll(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + createAgentSource(t, sb, map[string]string{ + "tutor.md": "# Tutor agent", + }) + claudeAgents := createAgentTarget(t, sb, "claude") + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + skills: + path: ` + sb.CreateTarget("claude") + ` + agents: + path: ` + claudeAgents + ` +`) + + sb.RunCLI("sync", "-g", "agents").AssertSuccess(t) + sb.RunCLI("uninstall", "-g", "agents", "--all", "--force").AssertSuccess(t) + + result := sb.RunCLI("diff", "-g", "--json") + result.AssertSuccess(t) + + output := parseJSON(t, result.Stdout) + targets, ok := output["targets"].([]any) + if !ok || len(targets) == 0 { + t.Fatalf("expected diff targets, got %v", output["targets"]) + } + + foundPrune := false + for _, rawTarget := range targets { + target, ok := rawTarget.(map[string]any) + if !ok || target["name"] != "claude" { + continue + } + items, _ := target["items"].([]any) + for _, rawItem := range items { + item, ok := rawItem.(map[string]any) + if !ok { + continue + } + if item["name"] == "tutor.md" && item["kind"] == "agent" && item["action"] == "remove" { + foundPrune = true + } + } + } + + if !foundPrune { + pretty, _ := json.MarshalIndent(output, "", " ") + t.Fatalf("expected agent prune in diff output, got:\n%s", string(pretty)) + } +} + +func TestSync_Agents_PrunesTargetAfterUninstallAll(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + createAgentSource(t, sb, map[string]string{ + "tutor.md": "# Tutor agent", + }) + claudeAgents := createAgentTarget(t, sb, "claude") + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: + claude: + skills: + path: ` + sb.CreateTarget("claude") + ` + agents: + path: ` + claudeAgents + ` +`) + + sb.RunCLI("sync", "-g", "agents").AssertSuccess(t) + sb.RunCLI("uninstall", "-g", "agents", "--all", "--force").AssertSuccess(t) + + syncResult := sb.RunCLI("sync", "-g", "agents") + syncResult.AssertSuccess(t) + syncResult.AssertAnyOutputContains(t, "1 pruned") + + if _, err := os.Lstat(filepath.Join(claudeAgents, "tutor.md")); !os.IsNotExist(err) { + t.Fatalf("expected tutor.md to be pruned from target, got err=%v", err) + } +} + // --- list agents JSON with kind field --- func TestList_Agents_JSON_AllEntriesHaveKind(t *testing.T) { diff --git a/tests/integration/install_agent_test.go b/tests/integration/install_agent_test.go index 8bf95574..6a3a330b 100644 --- a/tests/integration/install_agent_test.go +++ b/tests/integration/install_agent_test.go @@ -4,9 +4,11 @@ package integration import ( "os" + "os/exec" "path/filepath" "testing" + "skillshare/internal/install" "skillshare/internal/testutil" ) @@ -231,3 +233,71 @@ targets: {} t.Error("agent should NOT be in skills source dir") } } + +func TestInstall_TrackAgentRepo_UsesTrackedRepoFlow(t *testing.T) { + sb := testutil.NewSandbox(t) + defer sb.Cleanup() + + sb.WriteConfig(`source: ` + sb.SourcePath + ` +targets: {} +`) + + repoDir := filepath.Join(sb.Home, "tracked-agent-repo") + if err := os.MkdirAll(repoDir, 0o755); err != nil { + t.Fatalf("mkdir repo: %v", err) + } + if err := os.WriteFile(filepath.Join(repoDir, "reviewer.md"), []byte("# Reviewer v1"), 0o644); err != nil { + t.Fatalf("write agent: %v", err) + } + initGitRepo(t, repoDir) + + installResult := sb.RunCLI("install", "file://"+repoDir, "--track", "--kind", "agent") + installResult.AssertSuccess(t) + + agentsDir := filepath.Join(filepath.Dir(sb.SourcePath), "agents") + source, err := install.ParseSource("file://" + repoDir) + if err != nil { + t.Fatalf("parse source: %v", err) + } + trackedRepoDir := filepath.Join(agentsDir, "_"+source.TrackName()) + if _, err := os.Stat(filepath.Join(trackedRepoDir, ".git")); err != nil { + t.Fatalf("expected tracked agent repo .git to exist: %v", err) + } + if _, err := os.Stat(filepath.Join(trackedRepoDir, "reviewer.md")); err != nil { + t.Fatalf("expected tracked agent file to exist: %v", err) + } + if _, err := os.Stat(filepath.Join(sb.SourcePath, "_tracked-agent-repo")); !os.IsNotExist(err) { + t.Fatalf("expected no tracked agent repo in skills source, got err=%v", err) + } + + checkResult := sb.RunCLI("check", "agents") + checkResult.AssertSuccess(t) + checkResult.AssertAnyOutputContains(t, "reviewer") + checkResult.AssertOutputNotContains(t, "local agent") + + if err := os.WriteFile(filepath.Join(repoDir, "reviewer.md"), []byte("# Reviewer v2"), 0o644); err != nil { + t.Fatalf("update agent: %v", err) + } + for _, args := range [][]string{ + {"add", "reviewer.md"}, + {"commit", "-m", "update reviewer"}, + } { + cmd := exec.Command("git", args...) + cmd.Dir = repoDir + if out, err := cmd.CombinedOutput(); err != nil { + t.Fatalf("git %v failed: %s %v", args, out, err) + } + } + + updateResult := sb.RunCLI("update", "agents", "--all") + updateResult.AssertSuccess(t) + updateResult.AssertAnyOutputContains(t, "updated") + + content, err := os.ReadFile(filepath.Join(trackedRepoDir, "reviewer.md")) + if err != nil { + t.Fatalf("read updated agent: %v", err) + } + if string(content) != "# Reviewer v2" { + t.Fatalf("expected updated tracked agent content, got %q", string(content)) + } +} diff --git a/ui/src/components/InstallForm.tsx b/ui/src/components/InstallForm.tsx index 2791109f..743ebfbd 100644 --- a/ui/src/components/InstallForm.tsx +++ b/ui/src/components/InstallForm.tsx @@ -10,6 +10,7 @@ import ConfirmDialog from './ConfirmDialog'; import { useToast } from './Toast'; import { api, type InstallResult, type DiscoveredSkill, type DiscoveredAgent } from '../api/client'; import { queryKeys } from '../lib/queryKeys'; +import { clearAuditCache } from '../lib/auditCache'; import { radius } from '../design'; interface InstallFormProps { @@ -142,6 +143,7 @@ export default function InstallForm({ }; const invalidateAfterInstall = () => { + clearAuditCache(queryClient); queryClient.invalidateQueries({ queryKey: queryKeys.skills.all }); queryClient.invalidateQueries({ queryKey: queryKeys.overview }); }; diff --git a/ui/src/components/SkillPickerModal.tsx b/ui/src/components/SkillPickerModal.tsx index 0b400e33..c6e368eb 100644 --- a/ui/src/components/SkillPickerModal.tsx +++ b/ui/src/components/SkillPickerModal.tsx @@ -145,25 +145,32 @@ export default function SkillPickerModal({ )} {/* Skill list */} -
+
{filtered.map((skill) => { const isSelected = selected.has(skill.path); return (