nvim: Add simple LLM assisted programming
Added codecompanion as a test for LLM assistance during programming. Will see how much I end up using it.
This commit is contained in:
parent
0f77a110d3
commit
1848ec168b
1 changed file with 49 additions and 0 deletions
49
nvim/.config/nvim/lua/plugins/llm.lua
Normal file
49
nvim/.config/nvim/lua/plugins/llm.lua
Normal file
|
|
@ -0,0 +1,49 @@
|
|||
-- lazy.nvim spec: codecompanion.nvim for LLM-assisted editing, backed by Groq.
return {
  -- TODO: Add completion w blink, see https://codecompanion.olimorris.dev/installation.html
  {
    "olimorris/codecompanion.nvim",
    dependencies = {
      "nvim-lua/plenary.nvim",
      "nvim-treesitter/nvim-treesitter",
      "github/copilot.vim",
    },
    opts = {
      -- Route both the chat buffer and inline edits through the Groq adapter below.
      strategies = {
        chat = { adapter = "groq" },
        inline = { adapter = "groq" },
      },
      adapters = {
        -- Groq serves an OpenAI-compatible endpoint, so extend the built-in
        -- openai adapter rather than writing one from scratch.
        groq = function()
          return require("codecompanion.adapters").extend("openai", {
            env = {
              -- Name of the environment variable that holds the key (resolved
              -- by codecompanion at request time) — not the key itself.
              api_key = "GROQ_API_KEY",
            },
            name = "Groq",
            url = "https://api.groq.com/openai/v1/chat/completions",
            schema = {
              model = {
                default = "llama-3.1-8b-instant",
                -- Include the default in the list so it stays selectable when
                -- cycling through model choices.
                choices = {
                  "llama-3.1-8b-instant",
                  "llama-3.3-70b-versatile",
                  "mixtral-8x7b-32768",
                },
              },
              -- FIX: max_tokens and temperature are schema options; at the
              -- adapter's top level (as originally written) they are ignored.
              max_tokens = {
                default = 4096,
              },
              temperature = {
                default = 1,
              },
            },
          })
        end,
      },
    },
    keys = {
      { "<leader>aa", "<cmd>CodeCompanionActions<cr>", desc = "Actions", silent = true, mode = { "n", "v" } },
      -- <leader>ac is deliberately bound twice: normal mode toggles the chat
      -- window; visual mode sends the selection to the chat instead.
      { "<leader>ac", "<cmd>CodeCompanionChat Toggle<cr>", desc = "Toggle chat", silent = true },
      { "<leader>ac", "<cmd>CodeCompanionChat Add<cr>", desc = "Add to chat", silent = true, mode = "v" },
    },
    -- Lazy-load on these user commands.
    cmd = { "CodeCompanionActions", "CodeCompanionChat", "CodeCompanion", "CodeCompanionCmd" },
  },
}
Loading…
Add table
Add a link
Reference in a new issue