diff --git a/dot_vim/lua/avante_setup.lua b/dot_vim/lua/avante_setup.lua
index 5c51fbf..effb86b 100644
--- a/dot_vim/lua/avante_setup.lua
+++ b/dot_vim/lua/avante_setup.lua
@@ -1,37 +1,19 @@
 require("avante_lib").load()
 require("avante").setup({
-  provider = "ollama",
-  auto_suggestions_provider = "ollama",
-  behavior = {
-    auto_suggestions = false,
-  },
-  hints = {
-    enabled = false,
-  },
-  vendors = {
-    ---@type AvanteProvider
-    ollama = {
-      ["local"] = true,
-      endpoint = "127.0.0.1:11434/v1",
-      model = "qwen2.5-coder:7b",
-      parse_curl_args = function(opts, code_opts)
-        return {
-          url = opts.endpoint .. "/chat/completions",
-          headers = {
-            ["Accept"] = "application/json",
-            ["Content-Type"] = "application/json",
-          },
-          body = {
-            model = opts.model,
-            messages = require("avante.providers").copilot.parse_message(code_opts), -- you can make your own message, but this is very advanced
-            max_tokens = 2048,
-            stream = true,
-          },
-        }
-      end,
-      parse_response_data = function(data_stream, event_state, opts)
-        require("avante.providers").openai.parse_response(data_stream, event_state, opts)
-      end,
-    },
-  },
+  provider = "ollama",
+  auto_suggestions_provider = "ollama",
+  behavior = {
+    auto_suggestions = false,
+  },
+  hints = {
+    enabled = false,
+  },
+  vendors = {
+    ollama = {
+      endpoint = "http://127.0.0.1:11434/v1",
+      model = "qwen2.5-coder:7b",
+      __inherited_from = "openai",
+      api_key_name = "",
+    },
+  },
 })
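
The change drops the hand-written parse_curl_args/parse_response_data callbacks and instead points the ollama vendor at Ollama's OpenAI-compatible /v1 endpoint via `__inherited_from = "openai"`. For reference, this is the file as it reads after the hunk is applied; the inline comments are my reading of what each key does and are not part of the committed file:

```lua
-- dot_vim/lua/avante_setup.lua after this change
require("avante_lib").load()
require("avante").setup({
  provider = "ollama",
  auto_suggestions_provider = "ollama",
  behavior = {
    auto_suggestions = false,
  },
  hints = {
    enabled = false,
  },
  vendors = {
    ollama = {
      -- Ollama's local OpenAI-compatible API
      endpoint = "http://127.0.0.1:11434/v1",
      model = "qwen2.5-coder:7b",
      -- reuse avante.nvim's built-in OpenAI request/response handling
      -- in place of the custom curl and parse functions removed above
      __inherited_from = "openai",
      -- empty name: no API key environment variable is looked up
      api_key_name = "",
    },
  },
})
```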