diff --git a/lua/avante/config.lua b/lua/avante/config.lua
index 123ac11..2c48118 100644
--- a/lua/avante/config.lua
+++ b/lua/avante/config.lua
@@ -10,7 +10,7 @@ local M = {}
 ---@class avante.Config
 M._defaults = {
   debug = false,
-  ---@alias Provider "claude" | "openai" | "azure" | "gemini" | "vertex" | "cohere" | "copilot" | string
+  ---@alias Provider "claude" | "openai" | "azure" | "gemini" | "vertex" | "cohere" | "copilot" | "baidu" | "ollama" | string
   provider = "claude", -- Only recommend using Claude
   -- WARNING: Since auto-suggestions are a high-frequency operation and therefore expensive,
   -- currently designating it as `copilot` provider is dangerous because: https://github.com/yetone/avante.nvim/issues/1048
@@ -130,6 +130,17 @@ M._defaults = {
     temperature = 0,
     max_tokens = 4096,
   },
+  ---@type AvanteSupportedProvider
+  baidu = {
+    endpoint = "https://your-baidu-endpoint.com",
+    model = "deepseek-v3",
+    appid = "your-appid", -- assuming an appid is required
+    api_key_name = "BAIDU_API_KEY",
+    auto_suggestion_provider = "baidu", -- adjust to your actual setup
+    timeout = 30000, -- Timeout in milliseconds
+    temperature = 0,
+    max_tokens = 4096,
+  },
   ---To add support for custom provider, follow the format below
   ---See https://github.com/yetone/avante.nvim/wiki#custom-providers for more details
   ---@type {[string]: AvanteProvider}
@@ -150,6 +161,14 @@ M._defaults = {
       temperature = 0,
       max_tokens = 8000,
     },
+    ---@type AvanteSupportedProvider
+    ["ollama"] = {
+      __inherited_from = "openai",
+      model = "myDeepseek7b",
+      timeout = 30000, -- Timeout in milliseconds
+      temperature = 0,
+      max_tokens = 8000,
+    },
   },
   ---Specify the special dual_boost mode
   ---1. enabled: Whether to enable dual_boost mode. Default to false.
diff --git a/lua/avante/providers/openai.lua b/lua/avante/providers/openai.lua
index 0543046..1b4a44a 100644
--- a/lua/avante/providers/openai.lua
+++ b/lua/avante/providers/openai.lua
@@ -274,6 +274,7 @@ M.parse_response_without_stream = function(data, _, opts)
   end
 end
 
+local Log = require("avante.utils.log")
 M.parse_curl_args = function(provider, prompt_opts)
   local base, body_opts = P.parse_config(provider)
   local disable_tools = base.disable_tools or false
@@ -315,17 +316,20 @@ M.parse_curl_args = function(provider, prompt_opts)
   Utils.debug("endpoint", base.endpoint)
   Utils.debug("model", base.model)
 
-  return {
+  local request = {
     url = Utils.url_join(base.endpoint, "/chat/completions"),
     proxy = base.proxy,
     insecure = base.allow_insecure,
     headers = headers,
     body = vim.tbl_deep_extend("force", {
       model = base.model,
       messages = self:parse_messages(prompt_opts),
       stream = true,
       tools = tools,
     }, body_opts),
   }
+  -- Log the outgoing request details
+  Log.log_request(request.url, request.headers, request.body)
+  return request
 end
 
 return M
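
The openai.lua hunk requires "avante.utils.log", but this diff does not add that module, so the patch would not load as-is. Below is a minimal sketch of what such a module could look like, assuming only the log_request(url, headers, body) signature used above; the log file path and JSON encoding are illustrative choices, not part of this change.

-- lua/avante/utils/log.lua (hypothetical companion module; not included in this diff)
local M = {}

-- Write request logs under Neovim's state directory.
local log_path = vim.fn.stdpath("state") .. "/avante-requests.log"

---Append the outgoing request details to the log file.
---@param url string
---@param headers table<string, string>
---@param body table
function M.log_request(url, headers, body)
  local file = io.open(log_path, "a")
  if not file then return end
  -- NOTE: headers may contain the API key; redact them here if that is a concern.
  file:write(("[%s] %s\n"):format(os.date("%Y-%m-%d %H:%M:%S"), url))
  file:write("headers: " .. vim.json.encode(headers) .. "\n")
  file:write("body: " .. vim.json.encode(body) .. "\n\n")
  file:close()
end

return M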
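
For reference, with the defaults above a user would opt into the new provider roughly as follows; this is a sketch, and the endpoint is the placeholder from config.lua rather than a real Baidu URL.

require("avante").setup({
  provider = "baidu", -- or "ollama" to use the vendor entry inheriting from openai
  baidu = {
    endpoint = "https://your-baidu-endpoint.com", -- placeholder from the defaults above
    model = "deepseek-v3",
    api_key_name = "BAIDU_API_KEY", -- environment variable holding the key
  },
})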