ollama setup for neovim
commit 88ab0c75e4 (parent e49b11da27)
4 changed files with 58 additions and 12 deletions
dot_vim/lua/avante_setup.lua (new file, 37 lines)
@@ -0,0 +1,37 @@
require("avante_lib").load()
|
||||
require("avante").setup({
|
||||
provider = "ollama",
|
||||
auto_suggestions_provider = "ollama",
|
||||
behavior = {
|
||||
auto_suggestions = false,
|
||||
},
|
||||
hints = {
|
||||
enabled = false,
|
||||
},
|
||||
vendors = {
|
||||
---@type AvanteProvider
|
||||
ollama = {
|
||||
["local"] = true,
|
||||
endpoint = "127.0.0.1:11434/v1",
|
||||
model = "qwen2.5-coder:7b",
|
||||
parse_curl_args = function(opts, code_opts)
|
||||
return {
|
||||
url = opts.endpoint .. "/chat/completions",
|
||||
headers = {
|
||||
["Accept"] = "application/json",
|
||||
["Content-Type"] = "application/json",
|
||||
},
|
||||
body = {
|
||||
model = opts.model,
|
||||
messages = require("avante.providers").copilot.parse_message(code_opts), -- you can make your own message, but this is very advanced
|
||||
max_tokens = 2048,
|
||||
stream = true,
|
||||
},
|
||||
}
|
||||
end,
|
||||
parse_response_data = function(data_stream, event_state, opts)
|
||||
require("avante.providers").openai.parse_response(data_stream, event_state, opts)
|
||||
end,
|
||||
},
|
||||
},
|
||||
})
|
||||
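
For context (not part of this commit): since the file sits under dot_vim/lua/, it presumably lands on Neovim's runtimepath as lua/avante_setup.lua and is pulled in from the init file. A minimal sketch of that loader, assuming the module name matches the file path (the require call below is an assumption, not something shown in the diff):

  -- init.lua: load the avante/Ollama config defined above.
  -- Hypothetical loader; assumes dot_vim/ maps to a runtimepath entry
  -- so that lua/avante_setup.lua is resolvable by name.
  require("avante_setup")

The vendor entry above also assumes a local Ollama server is already listening on 127.0.0.1:11434 with the qwen2.5-coder:7b model pulled (e.g. via "ollama pull qwen2.5-coder:7b"); the config does not start that server itself.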