Ollama setup for Neovim

This commit is contained in:
Barak Michener 2024-09-21 13:42:04 -07:00
parent e49b11da27
commit 88ab0c75e4
4 changed files with 58 additions and 12 deletions

View file

@ -0,0 +1,37 @@
-- Avante.nvim configuration: point both the main provider and the inline
-- auto-suggestion provider at a locally running Ollama server, speaking
-- Ollama's OpenAI-compatible chat-completions API.
require("avante_lib").load()

-- Custom vendor definition for Ollama, registered under `vendors.ollama`.
---@type AvanteProvider
local ollama_provider = {
  ["local"] = true,
  endpoint = "127.0.0.1:11434/v1",
  model = "qwen2.5-coder:7b",
  -- Assemble the curl request for a streaming chat-completion call.
  parse_curl_args = function(opts, code_opts)
    local request = {
      url = opts.endpoint .. "/chat/completions",
      headers = {
        ["Accept"] = "application/json",
        ["Content-Type"] = "application/json",
      },
      body = {
        model = opts.model,
        -- Reuse the copilot message builder; writing a custom message
        -- formatter is possible but considered advanced usage.
        messages = require("avante.providers").copilot.parse_message(code_opts),
        max_tokens = 2048,
        stream = true,
      },
    }
    return request
  end,
  -- Ollama's /v1 endpoint emits OpenAI-style SSE chunks, so the stock
  -- openai response parser handles the stream unchanged.
  parse_response_data = function(data_stream, event_state, opts)
    require("avante.providers").openai.parse_response(data_stream, event_state, opts)
  end,
}

require("avante").setup({
  provider = "ollama",
  auto_suggestions_provider = "ollama",
  behavior = {
    auto_suggestions = false,
  },
  hints = {
    enabled = false,
  },
  vendors = {
    ollama = ollama_provider,
  },
})