ollama setup for neovim

Barak Michener 2024-09-21 13:42:04 -07:00
parent e49b11da27
commit 88ab0c75e4
4 changed files with 58 additions and 12 deletions

@@ -0,0 +1,37 @@
require("avante_lib").load()
require("avante").setup({
  provider = "ollama",
  auto_suggestions_provider = "ollama",
  behavior = {
    auto_suggestions = false,
  },
  hints = {
    enabled = false,
  },
  vendors = {
    ---@type AvanteProvider
    ollama = {
      ["local"] = true,
      endpoint = "127.0.0.1:11434/v1",
      model = "qwen2.5-coder:7b",
      parse_curl_args = function(opts, code_opts)
        return {
          url = opts.endpoint .. "/chat/completions",
          headers = {
            ["Accept"] = "application/json",
            ["Content-Type"] = "application/json",
          },
          body = {
            model = opts.model,
            messages = require("avante.providers").copilot.parse_message(code_opts), -- you can make your own message, but this is very advanced
            max_tokens = 2048,
            stream = true,
          },
        }
      end,
      parse_response_data = function(data_stream, event_state, opts)
        require("avante.providers").openai.parse_response(data_stream, event_state, opts)
      end,
    },
  },
})

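The vendor above points avante at Ollama's OpenAI-compatible API on 127.0.0.1:11434. A minimal sanity-check sketch, not part of the commit: it assumes Neovim 0.10+ (for vim.system), curl on PATH, and a plain-http endpoint (the config omits the scheme, which curl treats as http).

-- Sketch (not in the commit): ask the OpenAI-compatible endpoint for its model list.
local endpoint = "http://127.0.0.1:11434/v1" -- scheme added here for clarity
vim.system({ "curl", "-sf", endpoint .. "/models" }, { text = true }, function(out)
  -- on_exit runs in a fast event context, so defer UI calls with vim.schedule
  vim.schedule(function()
    if out.code == 0 then
      vim.notify("ollama reachable: " .. out.stdout)
    else
      vim.notify("ollama not reachable at " .. endpoint, vim.log.levels.WARN)
    end
  end)
end)
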

@@ -1,7 +1,10 @@
 require("ollama").setup({
-  model = "codellama",
-  url = "http://ollama.ollama.svc.k8s.barakmich.com",
-  serve = {
-    on_start = false,
-  },
+  model = "codellama:34b",
+  url = "http://daystrom.home.0b100.net:11434",
+  serve = {
+    on_start = false,
+  },
 })
+
+vim.keymap.set("n", "<Leader>,,", ":<c-u>lua require('ollama').prompt()<cr>", { silent = true })
+vim.keymap.set("v", "<Leader>,,", ":<c-u>lua require('ollama').prompt()<cr>", { silent = true })

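The two new mappings differ only in mode. vim.keymap.set also accepts a list of modes, so they could be collapsed into one call; a sketch, not part of the commit (the desc field is an addition):

-- Same mapping registered for normal and visual mode in one call
-- (sketch; "desc" is not in the original config).
vim.keymap.set({ "n", "v" }, "<Leader>,,", ":<c-u>lua require('ollama').prompt()<cr>", { silent = true, desc = "ollama.nvim prompt" })
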

@@ -624,6 +624,8 @@ end
 require("hoversplit").setup({})
 --require("murdock")
-require("ollama_setup")
+--require("ollama_setup")
+--require("gen_setup")
 require("lir_setup")
 require("lualine_setup")
+require("avante_setup")