
Commit 277dbeb

GaetanLepage authored and nix-infra-bot committed
plugins/codecompanion: init
1 parent d4ae1e3 commit 277dbeb

File tree: 2 files changed, +1244 -0 lines changed

Lines changed: 259 additions & 0 deletions
@@ -0,0 +1,259 @@
{ lib, ... }:
let
  inherit (lib.nixvim)
    defaultNullOpts
    mkNullOrStr'
    mkNullOrStr
    ;
  inherit (lib) types;
in
lib.nixvim.neovim-plugin.mkNeovimPlugin {
  name = "codecompanion";
  originalName = "codecompanion.nvim";
  package = "codecompanion-nvim";

  maintainers = [ lib.maintainers.GaetanLepage ];

  settingsOptions = {
    adapters = defaultNullOpts.mkAttrsOf' {
      type = types.anything;
      pluginDefault = {
        anthropic = "anthropic";
        azure_openai = "azure_openai";
        copilot = "copilot";
        gemini = "gemini";
        ollama = "ollama";
        openai = "openai";
        xai = "xai";
        non_llms = {
          jina = "jina";
        };
        opts = {
          allow_insecure = false;
          proxy = null;
        };
      };
      example = {
        openai.__raw = ''
          function()
            return require("codecompanion.adapters").extend("openai", {
              schema = {
                model = {
                  default = "gpt-4o"
                }
              }
            })
          end
        '';
        ollama.__raw = ''
          function()
            return require("codecompanion.adapters").extend("ollama", {
              schema = {
                model = {
                  default = "llama3:latest"
                }
              }
            })
          end
        '';
      };
      description = ''
        In CodeCompanion, adapters are interfaces that act as a bridge between the plugin's
        functionality and an LLM.

        Refer to the [documentation](https://github.com/olimorris/codecompanion.nvim/blob/main/doc/ADAPTERS.md)
        to learn about the adapters spec.
      '';
    };

    strategies = defaultNullOpts.mkAttrsOf' {
      type = types.anything;
      pluginDefault = lib.literalExpression "See upstream documentation";
      example = {
        chat.adapter = "ollama";
        inline.adapter = "ollama";
        agent.adapter = "ollama";
      };
      description = ''
        The plugin utilises objects called Strategies.
        These are the different ways that a user can interact with the plugin.
        - The _chat_ strategy harnesses a buffer to allow direct conversation with the LLM.
        - The _inline_ strategy allows for output from the LLM to be written directly into a
          pre-existing Neovim buffer.
        - The _agent_ and _workflow_ strategies are wrappers for the _chat_ strategy, allowing
          for tool use and agentic workflows.
      '';
    };

    prompt_library = defaultNullOpts.mkAttrsOf' {
      type = types.submodule {
        freeformType = with types; attrsOf anything;
        options = {
          strategy = mkNullOrStr ''
            The plugin utilises objects called Strategies.
            These are the different ways that a user can interact with the plugin.
            - The _chat_ strategy harnesses a buffer to allow direct conversation with the LLM.
            - The _inline_ strategy allows for output from the LLM to be written directly into a
              pre-existing Neovim buffer.
            - The _agent_ and _workflow_ strategies are wrappers for the _chat_ strategy, allowing
              for tool use and agentic workflows.
          '';

          description = mkNullOrStr' {
            description = ''
              A description for this recipe.
            '';
            example = "Explain the LSP diagnostics for the selected code";
          };
        };
      };
      pluginDefault = lib.literalExpression "See upstream documentation";
      example = {
        "Custom Prompt" = {
          strategy = "inline";
          description = "Prompt the LLM from Neovim";
          opts = {
            index = 3;
            is_default = true;
            is_slash_cmd = false;
            user_prompt = true;
          };
          prompts = [
            {
              role.__raw = "system";
              content.__raw = ''
                function(context)
                  return fmt(
                    [[I want you to act as a senior %s developer. I will ask you specific questions and I want you to return raw code only (no codeblocks and no explanations). If you can't respond with code, respond with nothing]],
                    context.filetype
                  )
                end
              '';
              opts = {
                visible = false;
                tag = "system_tag";
              };
            }
          ];
        };
        "Generate a Commit Message" = {
          strategy = "chat";
          description = "Generate a commit message";
          opts = {
            index = 10;
            is_default = true;
            is_slash_cmd = true;
            short_name = "commit";
            auto_submit = true;
          };
          prompts = [
            {
              role = "user";
              content.__raw = ''
                function()
                  return fmt(
                    [[You are an expert at following the Conventional Commit specification. Given the git diff listed below, please generate a commit message for me:

                    ```diff
                    %s
                    ```
                    ]],
                    vim.fn.system("git diff --no-ext-diff --staged")
                  )
                end
              '';
              opts = {
                contains_code = true;
              };
            }
          ];
        };
      };
      description = ''
        The plugin comes with a number of pre-built prompts.

        As per the config, these can be called via keymaps or via the cmdline.
        These prompts have been carefully curated to mimic those in GitHub's Copilot Chat.

        Of course, you can create your own prompts and add them to the Action Palette or even to the
        slash command completion menu in the chat buffer.
        Please see the [RECIPES](https://github.com/olimorris/codecompanion.nvim/blob/main/doc/RECIPES.md)
        guide for more information.
      '';
    };

    display = defaultNullOpts.mkNullable' {
      type = types.submodule {
        freeformType = with types; attrsOf anything;
      };
      example = {
        display = {
          action_palette = {
            provider = "default";
            opts.show_default_prompt_library = true;
          };
          chat = {
            window = {
              layout = "vertical";
              opts.breakindent = true;
            };
          };
        };
      };
      pluginDefault = lib.literalExpression "See upstream documentation";
      description = ''
        Appearance settings.
      '';
    };

    opts = defaultNullOpts.mkNullable' {
      type = types.submodule {
        freeformType = with types; attrsOf anything;
      };
      description = ''
        General settings for the plugin.
      '';
      example = {
        log_level = "TRACE";
        send_code = true;
        use_default_actions = true;
        use_default_prompts = true;
      };
      pluginDefault = lib.literalExpression "See upstream documentation";
    };
  };

  settingsExample = {
    adapters = {
      ollama.__raw = ''
        function()
          return require('codecompanion.adapters').extend('ollama', {
            env = {
              url = "http://127.0.0.1:11434",
            },
            schema = {
              model = {
                default = 'qwen2.5-coder:latest',
                -- default = "llama3.1:8b-instruct-q8_0",
              },
              num_ctx = {
                default = 32768,
              },
            },
          })
        end
      '';
    };
    strategies = {
      chat.adapter = "ollama";
      inline.adapter = "ollama";
      agent.adapter = "ollama";
    };
    opts = {
      log_level = "TRACE";
      send_code = true;
      use_default_actions = true;
      use_default_prompts = true;
    };
  };
}
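For reference, a module declared with mkNeovimPlugin is consumed under plugins.<name> in a NixVim configuration. The sketch below is a hypothetical usage example, not part of this commit: the programs.nixvim wrapper assumes nixvim is used via its home-manager module, and the settings values are lifted from the settingsExample above.

# Hypothetical usage sketch (not part of this commit), assuming nixvim's
# home-manager module; settings values taken from settingsExample above.
{
  programs.nixvim = {
    enable = true;
    plugins.codecompanion = {
      enable = true;
      settings = {
        strategies = {
          chat.adapter = "ollama";
          inline.adapter = "ollama";
        };
        opts = {
          log_level = "TRACE";
          send_code = true;
        };
      };
    };
  };
}

Any value that the freeform settings accept is forwarded to codecompanion.nvim's setup function, so Lua snippets can be passed with __raw as in the adapters example above.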
