Skip to content

Commit 5574515

Browse files
committed
plugins/ollama: init
1 parent f0b487a commit 5574515

File tree

3 files changed

+284
-0
lines changed

3 files changed

+284
-0
lines changed

plugins/default.nix

+1
Original file line numberDiff line numberDiff line change
@@ -141,6 +141,7 @@
141141
./utils/nvim-osc52.nix
142142
./utils/nvim-ufo.nix
143143
./utils/oil.nix
144+
./utils/ollama.nix
144145
./utils/persistence.nix
145146
./utils/project-nvim.nix
146147
./utils/refactoring.nix

plugins/utils/ollama.nix

+234
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,234 @@
# nixvim module for ollama.nvim (https://github.com/nomnivore/ollama.nvim).
# Declares typed options under `plugins.ollama` and, when enabled, renders
# them into a `require('ollama').setup({...})` call in extraConfigLua.
# NOTE(review): behavior of `helpers.*` (defaultNullOpts, nixvimTypes,
# toLuaObject, ...) is defined elsewhere in the nixvim tree — not visible here.
{
  lib,
  helpers,
  config,
  pkgs,
  ...
}:
with lib; let
  cfg = config.plugins.ollama;

  # Type of a prompt "action": raw Lua, one of the plugin's builtin action
  # names, or a custom `{ fn, opts }` submodule.
  actionOptionType = with helpers.nixvimTypes;
    oneOf [
      rawLua
      (enum [
        "display"
        "replace"
        "insert"
        "display_replace"
        "display_insert"
        "display_prompt"
      ])
      (submodule {
        options = {
          fn = helpers.mkNullOrStrLuaFnOr (enum [false]) ''
            fun(prompt: table): Ollama.PromptActionResponseCallback

            Example:
            ```lua
            function(prompt)
              -- This function is called when the prompt is selected
              -- just before sending the prompt to the LLM.
              -- Useful for setting up UI or other state.

              -- Return a function that will be used as a callback
              -- when a response is received.
              ---@type Ollama.PromptActionResponseCallback
              return function(body, job)
                -- body is a table of the json response
                -- body.response is the response text received

                -- job is the plenary.job object when opts.stream = true
                -- job is nil otherwise
              end

            end
            ```
          '';

          opts = {
            stream = helpers.defaultNullOpts.mkBool false ''
              Whether to stream the response.
            '';
          };
        };
      })
    ];
in {
  meta.maintainers = [maintainers.GaetanLepage];

  options.plugins.ollama =
    helpers.extraOptionsOptions
    // {
      enable = mkEnableOption "ollama.nvim";

      package = helpers.mkPackageOption "ollama.nvim" pkgs.vimPlugins.ollama-nvim;

      model = helpers.defaultNullOpts.mkStr "mistral" ''
        The default model to use.
      '';

      prompts = let
        # Options of a single prompt. Declared camelCase on the nix side;
        # `processPrompt` below renames them to the snake_case keys that
        # ollama.nvim expects.
        promptOptions = {
          prompt = mkOption {
            type = with helpers.nixvimTypes; maybeRaw str;
            description = ''
              The prompt to send to the model.

              Replaces the following tokens:
              - `$input`: The input from the user
              - `$sel`: The currently selected text
              - `$ftype`: The filetype of the current buffer
              - `$fname`: The filename of the current buffer
              - `$buf`: The contents of the current buffer
              - `$line`: The current line in the buffer
              - `$lnum`: The current line number in the buffer
            '';
          };

          inputLabel = helpers.defaultNullOpts.mkStr "> " ''
            The label to use for an input field.
          '';

          action = helpers.mkNullOrOption actionOptionType ''
            How to handle the output.

            See [here](https://github.com/nomnivore/ollama.nvim/tree/main#actions) for more details.

            Defaults to the value of `plugins.ollama.action`.
          '';

          model = helpers.mkNullOrOption (with helpers.nixvimTypes; maybeRaw str) ''
            The model to use for this prompt.

            Defaults to the value of `plugins.ollama.model`.
          '';

          # `false` disables extraction for this prompt (hence `either str (enum [false])`).
          extract =
            helpers.defaultNullOpts.mkNullable
            (
              with helpers.nixvimTypes;
                maybeRaw
                (
                  either
                  str
                  (enum [false])
                )
            )
            "```$ftype\n(.-)```"
            ''
              A `string.match` pattern to use for an Action to extract the output from the response
              (Insert/Replace).
            '';

          options = helpers.mkNullOrOption (with types; attrsOf anything) ''
            Additional model parameters, such as temperature, listed in the documentation for the [Modelfile](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values).
          '';

          system = helpers.mkNullOrOption (helpers.nixvimTypes.maybeRaw types.str) ''
            The SYSTEM instruction specifies the system prompt to be used in the Modelfile template,
            if applicable.
            (overrides what's in the Modelfile).
          '';

          format = helpers.defaultNullOpts.mkEnumFirstDefault ["json"] ''
            The format to return a response in.
            Currently the only accepted value is `"json"`.
          '';
        };

        # Rename the camelCase nix option names to the plugin's snake_case
        # keys. Non-attrset values (i.e. `false`, used to disable one of the
        # plugin's default prompts) pass through untouched.
        processPrompt = prompt:
          if isAttrs prompt
          then {
            inherit (prompt) prompt;
            input_label = prompt.inputLabel;
            inherit
              (prompt)
              action
              model
              extract
              options
              system
              format
              ;
          }
          else prompt;
      in
        mkOption {
          # Each attr is either a prompt submodule or `false` (disable).
          type = with types;
            attrsOf (
              either
              (submodule {
                options = promptOptions;
              })
              (enum [false])
            );
          default = {};
          # Applied at evaluation time so downstream consumers see snake_case keys.
          apply = v: mapAttrs (_: processPrompt) v;
          description = ''
            A table of prompts to use for each model.
            Default prompts are defined [here](https://github.com/nomnivore/ollama.nvim/blob/main/lua/ollama/prompts.lua).
          '';
        };

      action = helpers.defaultNullOpts.mkNullable actionOptionType "display" ''
        How to handle prompt outputs when not specified by prompt.

        See [here](https://github.com/nomnivore/ollama.nvim/tree/main#actions) for more details.
      '';

      url = helpers.defaultNullOpts.mkStr "http://127.0.0.1:11434" ''
        The url to use to connect to the ollama server.
      '';

      serve = {
        onStart = helpers.defaultNullOpts.mkBool false ''
          Whether to start the ollama server on startup.
        '';

        command = helpers.defaultNullOpts.mkStr "ollama" ''
          The command to use to start the ollama server.
        '';

        args = helpers.defaultNullOpts.mkListOf types.str ''["serve"]'' ''
          The arguments to pass to the serve command.
        '';

        stopCommand = helpers.defaultNullOpts.mkStr "pkill" ''
          The command to use to stop the ollama server.
        '';

        stopArgs = helpers.defaultNullOpts.mkListOf types.str ''["-SIGTERM" "ollama"]'' ''
          The arguments to pass to the stop command.
        '';
      };
    };

  config = mkIf cfg.enable {
    extraPlugins = [cfg.package];

    extraConfigLua = let
      # Translate the nix-side camelCase option names to the plugin's
      # snake_case setup keys; `cfg.extraOptions` (freeform passthrough from
      # `helpers.extraOptionsOptions`) wins on conflicts.
      setupOptions = with cfg;
        {
          inherit
            model
            prompts
            action
            url
            ;
          serve = with serve; {
            on_start = onStart;
            inherit
              command
              args
              ;
            stop_command = stopCommand;
            stop_args = stopArgs;
          };
        }
        // cfg.extraOptions;
    in ''
      require('ollama').setup(${helpers.toLuaObject setupOptions})
    '';
  };
}
tests/test-sources/plugins/utils/ollama.nix

+49
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,49 @@
{
  # Plugin enabled with nothing but its defaults.
  empty.plugins.ollama.enable = true;

  # Every available option set explicitly (mostly to its default value).
  example.plugins.ollama = {
    enable = true;

    model = "mistral";

    prompts = {
      # A `false` value disables one of the plugin's builtin prompts.
      Sample_Prompt = false;

      my-prompt = {
        prompt = "Hello $input $sel. J'aime le fromage.";
        inputLabel = "> ";
        model = "foo";
        extract = "```$ftype\n(.-)```";
        system = "system";
        format = "json";

        # Custom action: a Lua callback plus its options.
        action = {
          opts = {stream = true;};
          fn = ''
            function(prompt)
              return function(body, job)
              end
            end
          '';
        };

        # Extra model parameters forwarded verbatim to the Modelfile.
        options = {
          mirostat_eta = 0.1;
          num_thread = 8;
          repeat_last_n = -1;
          stop = "arrêt";
        };
      };
    };

    action = "display";
    url = "http://127.0.0.1:11434";

    serve = {
      onStart = false;
      command = "ollama";
      args = ["serve"];
      stopCommand = "pkill";
      stopArgs = ["-SIGTERM" "ollama"];
    };
  };
}

0 commit comments

Comments
 (0)