|
| 1 | +#!/usr/bin/env bb |
| 2 | +;; spai plugin: search |
| 3 | +;; |
| 4 | +;; Natural language → spai command recommendation via local Ollama. |
| 5 | +;; "Which spai command do I need?" answered by a tiny local model. |
| 6 | +;; |
| 7 | +;; Usage: spai search "find class predicates" |
| 8 | +;; spai search "what changed recently" |
| 9 | +;; spai search --model qwen2.5-coder:3b "who uses this file" |
| 10 | + |
| 11 | +{:doap/name "search" |
| 12 | + :doap/description "NL search over spai's tool catalog via local Ollama" |
| 13 | + :dc/creator "Claude + Lance" |
| 14 | + :spai/args "\"natural language query\" [--model name]" |
| 15 | + :spai/returns "EDN: matched command(s) with usage and rationale" |
| 16 | + :spai/example "spai search \"find what predicates a class has\"" |
| 17 | + :spai/tags #{:meta :discovery :nl}} |
| 18 | + |
| 19 | +(require '[babashka.http-client :as http] |
| 20 | + '[babashka.process :as p] |
| 21 | + '[cheshire.core :as json] |
| 22 | + '[clojure.edn :as edn] |
| 23 | + '[clojure.string :as str] |
| 24 | + '[clojure.pprint :as pp]) |
| 25 | + |
;; Base URL of the local Ollama server; override with the OLLAMA_URL env var.
(def ollama-url (or (System/getenv "OLLAMA_URL") "http://localhost:11434"))
;; Model used when --model is not supplied on the command line.
(def default-model "qwen2.5-coder:7b")
| 28 | + |
| 29 | +;; --------------------------------------------------------------------------- |
| 30 | +;; Catalog: read from spai help |
| 31 | +;; --------------------------------------------------------------------------- |
| 32 | + |
(defn load-catalog
  "Run `spai help` and parse the EDN tool catalog.

  Returns the catalog map, or nil when spai exits non-zero, emits no
  parseable EDN, or is not installed at all (p/shell throws when the
  executable is missing even with :continue true — we catch that so the
  caller's 'is spai installed?' diagnostic is actually reachable)."
  []
  (try
    (let [{:keys [exit out]} (p/shell {:out :string :err :string :continue true}
                                      "spai" "help")]
      (when (zero? exit)
        ;; spai help outputs a header line then EDN. Find the opening {
        (when-let [edn-start (str/index-of out "{")]
          (edn/read-string (subs out edn-start)))))
    (catch Exception _
      ;; Missing binary or malformed EDN — caller treats nil as failure.
      nil)))
| 43 | + |
(defn catalog->prompt-text
  "Turn the tool catalog into a compact text block for the LLM.

  Accepts both plain (:args, :returns, :example) and namespaced
  (:spai/args, :spai/returns, :spai/example) metadata keys — plugin
  headers declare the namespaced form (see this file's own metadata).
  Plain keys win when both are present, preserving prior behavior."
  [catalog]
  (letfn [(field [info k ns-k]
            ;; plain key first for backward compatibility, then namespaced
            (or (get info k) (get info ns-k) ""))]
    (str/join "\n"
              (for [[cmd-key info] (sort-by key catalog)
                    :when (not= cmd-key :search)] ; don't recommend ourselves
                ;; cmd-name, not `name`, to avoid shadowing clojure.core/name
                (let [cmd-name (name cmd-key)
                      desc (field info :returns :spai/returns)
                      args (field info :args :spai/args)
                      example (field info :example :spai/example)]
                  (str cmd-name
                       (when (seq args) (str " " args))
                       (when (seq desc) (str " — " desc))
                       (when (seq example) (str " (e.g. " example ")"))))))))
| 58 | + |
| 59 | +;; --------------------------------------------------------------------------- |
| 60 | +;; System prompt — deliberately minimal |
| 61 | +;; --------------------------------------------------------------------------- |
| 62 | + |
(defn build-system-prompt
  "Build the system prompt for the recommender model: fixed instructions
  demanding a bare EDN vector of {:command :invocation :why} maps, with
  the rendered tool catalog spliced in the middle."
  [catalog-text]
  (str
"You are a tool recommender for `spai`, a code exploration CLI for LLM agents.
Given a natural language question, return the best matching spai command(s).

Output ONLY a valid EDN vector of maps. No explanation, no markdown, no prose.

Each map must have:
  :command — the spai command name (string)
  :invocation — exact command line to run (string)
  :why — one sentence explaining the match (string)

Return 1-3 matches, best first. If unsure, return your best guess.

## Available commands

" catalog-text "

Output ONLY the EDN vector."))
| 82 | + |
| 83 | +;; --------------------------------------------------------------------------- |
| 84 | +;; Ollama |
| 85 | +;; --------------------------------------------------------------------------- |
| 86 | + |
(defn ollama-chat
  "POST a system+user chat to the local Ollama /api/chat endpoint
  (non-streaming) and return a summary map: :content, :model, token
  counts, :total_ms wall time, and :tokens_per_sec throughput (nil when
  the server reported no eval stats)."
  [model system-prompt user-prompt]
  (let [payload {:model model
                 :messages [{:role "system" :content system-prompt}
                            {:role "user" :content user-prompt}]
                 :stream false
                 :options {:temperature 0.1
                           :num_predict 256}}
        response (http/post (str ollama-url "/api/chat")
                            {:headers {"Content-Type" "application/json"}
                             :body (json/generate-string payload)
                             :throw false
                             :timeout 30000})
        body (json/parse-string (:body response) true)
        eval-count (:eval_count body)
        eval-dur (:eval_duration body)]
    {:content (get-in body [:message :content])
     :model (:model body)
     :prompt_tokens (:prompt_eval_count body)
     :completion_tokens eval-count
     ;; total_duration is reported in nanoseconds → milliseconds
     :total_ms (some-> (:total_duration body) (/ 1e6) long)
     ;; tokens over nanoseconds → tokens/sec, rounded, coerced to double
     :tokens_per_sec (when (and eval-count eval-dur (pos? eval-dur))
                       (/ (Math/round (/ (* eval-count 1e9) eval-dur)) 1.0))}))
| 109 | + |
(defn extract-edn
  "Extract EDN from an LLM response, stripping markdown code fencing.

  Handles: a properly closed ```/```edn fence anywhere in the text (even
  preceded by prose); an unclosed leading fence (previously `butlast`
  silently ate the final content line); and plain unfenced text, which is
  returned trimmed as-is."
  [text]
  (let [text (str/trim (or text ""))]
    (if-let [[_ inner] (re-find #"(?s)```[A-Za-z]*\n(.*?)```" text)]
      ;; closed fence: return just the fenced body
      (str/trim inner)
      (if (str/starts-with? text "```")
        ;; unclosed fence: drop only the fence line, keep all content
        (str/join "\n" (rest (str/split-lines text)))
        text))))
| 120 | + |
| 121 | +;; --------------------------------------------------------------------------- |
| 122 | +;; Main |
| 123 | +;; --------------------------------------------------------------------------- |
| 124 | + |
;; Entry point (runs at load, script-style): parse CLI flags, load the spai
;; tool catalog, ask Ollama for command recommendations, pretty-print EDN to
;; stdout. Progress/diagnostics go to stderr so stdout stays machine-readable.
(let [args *command-line-args*
      model (atom default-model)   ; overridden by --model
      query-args (atom [])]        ; accumulates the free-form query words

  ;; Parse args
  (loop [args args]
    (when (seq args)
      (cond
        ;; --model NAME — NOTE(review): a trailing --model with no value
        ;; resets the model to nil; spai never passes that in practice.
        (= (first args) "--model")
        (do (reset! model (second args))
            (recur (drop 2 args)))

        (contains? #{"--help" "-h"} (first args))
        (do
          (println "Usage: spai search \"your question here\"")
          (println "       spai search --model qwen2.5-coder:3b \"who uses this file\"")
          (println)
          (println "Searches spai's tool catalog using natural language via local Ollama.")
          (println "Returns the best matching command(s) with invocation examples.")
          (System/exit 0))

        ;; anything else is part of the natural-language query
        :else
        (do (swap! query-args conj (first args))
            (recur (rest args))))))

  ;; No query at all → usage on stdout, exit 1.
  (when (empty? @query-args)
    (println "Usage: spai search \"your question here\"")
    (System/exit 1))

  ;; Load tool catalog
  (let [catalog (load-catalog)]
    (when-not catalog
      (binding [*out* *err*]
        (println "Error: could not load spai tool catalog (is spai installed?)"))
      (System/exit 1))

    (let [catalog-text (catalog->prompt-text catalog)
          user-query (str/join " " @query-args)   ; re-join words into one query
          system-prompt (build-system-prompt catalog-text)]

      ;; Query ollama — progress spinner text on stderr, flushed so it shows
      ;; before the (possibly slow) HTTP round-trip.
      (binding [*out* *err*]
        (print (str "Searching (" @model ")... "))
        (flush))

      (let [result (ollama-chat @model system-prompt user-query)
            raw (extract-edn (:content result))]

        ;; Timing/token stats on stderr.
        (binding [*out* *err*]
          (println (str "done. ("
                        (:total_ms result) "ms, "
                        (:prompt_tokens result) "→" (:completion_tokens result) " tokens, "
                        (:tokens_per_sec result) " tok/s)")))

        ;; Try to parse as EDN, fall back to raw
        (let [parsed (try (edn/read-string raw) (catch Exception _ nil))]
          ;; Only a vector/seq counts as a valid answer (the prompt demands
          ;; a vector of maps); anything else is wrapped in an error map so
          ;; downstream consumers always get EDN on stdout.
          (if (and parsed (or (vector? parsed) (seq? parsed)))
            (pp/pprint parsed)
            ;; Fallback: print raw and wrap in error
            (pp/pprint {:error "Could not parse LLM response as EDN"
                        :raw raw
                        :query user-query})))))))
0 commit comments