-- avante.nvim plugin specification (lazy.nvim) with local Ollama providers.
-- Build an OpenAI-compatible vendor entry for a local Ollama model.
-- The four vendors below differed only in the model tag, so they are
-- generated from one template instead of repeating ~30 lines each.
---@param model string Ollama model tag, e.g. "llama3.1:latest"
---@return AvanteProvider
local function ollama_vendor(model)
  return {
    ["local"] = true,
    -- NOTE(review): no http:// scheme here; curl defaults to http, but
    -- consider "http://127.0.0.1:11434/v1" for explicitness — confirm.
    endpoint = "127.0.0.1:11434/v1",
    model = model,
    -- Translate avante's request into curl arguments for the
    -- OpenAI-compatible /chat/completions endpoint.
    parse_curl_args = function(opts, code_opts)
      return {
        url = opts.endpoint .. "/chat/completions",
        headers = {
          ["Accept"] = "application/json",
          ["Content-Type"] = "application/json",
        },
        body = {
          model = opts.model,
          messages = require("avante.providers").copilot.parse_messages(code_opts), -- you can make your own message, but this is very advanced
          max_tokens = 2048,
          stream = true,
        },
      }
    end,
    -- Reuse avante's OpenAI SSE parser; Ollama's /v1 API is wire-compatible.
    parse_response_data = function(data_stream, event_state, opts)
      require("avante.providers").openai.parse_response(data_stream, event_state, opts)
    end,
  }
end

-- lazy.nvim plugin spec for avante.nvim using local Ollama providers.
return {
  "yetone/avante.nvim",
  -- NOTE(review): `lazy = false` overrides `event = "VeryLazy"`; keeping
  -- both as in the original, but one of the two is redundant — confirm intent.
  event = "VeryLazy",
  lazy = false,
  version = false, -- set this if you want to always pull the latest change
  opts = {
    claude = {
      -- NOTE(review): marking the hosted claude provider as local looks
      -- unusual — verify this is intentional.
      ["local"] = true,
    },
    -- Begin opts
    -- Active provider: medium Qwen coder model served by local Ollama.
    provider = "ol_qwenm",
    vendors = {
      ---@type AvanteProvider
      ol_llama = ollama_vendor("llama3.1:latest"),
      ---@type AvanteProvider
      ol_qwenl = ollama_vendor("qwen2.5-coder:32b"),
      ---@type AvanteProvider
      ol_llama70b = ollama_vendor("llama3.1:70b"),
      ---@type AvanteProvider
      ol_qwenm = ollama_vendor("qwen2.5-coder:14b"),
    },
    -- add any opts here
  },
  -- if you want to build from source then do `make BUILD_FROM_SOURCE=true`
  build = "make",
  -- build = "powershell -ExecutionPolicy Bypass -File Build.ps1 -BuildFromSource false" -- for windows
  dependencies = {
    "stevearc/dressing.nvim",
    "nvim-lua/plenary.nvim",
    "MunifTanjim/nui.nvim",
    --- The below dependencies are optional,
    "nvim-tree/nvim-web-devicons", -- or echasnovski/mini.icons
    --"zbirenbaum/copilot.lua", -- for providers='copilot'
    {
      -- support for image pasting
      "HakonHarnes/img-clip.nvim",
      event = "VeryLazy",
      opts = {
        -- recommended settings
        default = {
          embed_image_as_base64 = false,
          prompt_for_file_name = false,
          drag_and_drop = {
            insert_mode = true,
          },
          -- required for Windows users
          use_absolute_path = true,
        },
      },
    },
    {
      -- Make sure to set this up properly if you have lazy=true
      "MeanderingProgrammer/render-markdown.nvim",
      opts = {
        file_types = { "markdown", "Avante" },
      },
      ft = { "markdown", "Avante" },
    },
  },
}