diff --git a/README.md b/README.md
index e056060..026d49b 100644
--- a/README.md
+++ b/README.md
@@ -81,8 +81,8 @@ _See [config.lua#L9](./lua/avante/config.lua) for the full config_
 
 ```lua
 {
-  ---@alias Provider "openai" | "claude" | "azure" | "deepseek"
-  provider = "claude", -- "claude" or "openai" or "azure" or "deepseek"
+  ---@alias Provider "openai" | "claude" | "azure" | "deepseek" | "groq"
+  provider = "claude", -- "claude" or "openai" or "azure" or "deepseek" or "groq"
   openai = {
     endpoint = "https://api.openai.com",
     model = "gpt-4o",
@@ -173,6 +173,12 @@ Given its early stage, `avante.nvim` currently supports the following basic func
 > ```sh
 > export DEEPSEEK_API_KEY=you-api-key
 > ```
+>
+> For Groq
+>
+> ```sh
+> export GROQ_API_KEY=your-api-key
+> ```
 
 1. Open a code file in Neovim.
 2. Use the `:AvanteAsk` command to query the AI about the code.
diff --git a/lua/avante/ai_bot.lua b/lua/avante/ai_bot.lua
index 3db1a98..f06c2d9 100644
--- a/lua/avante/ai_bot.lua
+++ b/lua/avante/ai_bot.lua
@@ -18,6 +18,7 @@ local E = {
     claude = "ANTHROPIC_API_KEY",
     azure = "AZURE_OPENAI_API_KEY",
     deepseek = "DEEPSEEK_API_KEY",
+    groq = "GROQ_API_KEY",
   },
   _once = false,
 }
@@ -316,6 +317,23 @@ local function call_openai_api_stream(question, code_lang, code_content, selecte
       max_tokens = Config.deepseek.max_tokens,
       stream = true,
     }
+  elseif Config.provider == "groq" then
+    api_key = os.getenv(E.key("groq"))
+    url = Utils.trim_suffix(Config.groq.endpoint, "/") .. "/openai/v1/chat/completions"
+    headers = {
+      ["Content-Type"] = "application/json",
+      ["Authorization"] = "Bearer " .. api_key,
+    }
+    body = {
+      model = Config.groq.model,
+      messages = {
+        { role = "system", content = system_prompt },
+        { role = "user", content = user_prompt },
+      },
+      temperature = Config.groq.temperature,
+      max_tokens = Config.groq.max_tokens,
+      stream = true,
+    }
   else
     url = Utils.trim_suffix(Config.openai.endpoint, "/") .. "/v1/chat/completions"
     headers = {
@@ -382,7 +400,12 @@ end
 ---@param on_chunk fun(chunk: string): any
 ---@param on_complete fun(err: string|nil): any
 function M.call_ai_api_stream(question, code_lang, code_content, selected_content_content, on_chunk, on_complete)
-  if Config.provider == "openai" or Config.provider == "azure" or Config.provider == "deepseek" then
+  if
+    Config.provider == "openai"
+    or Config.provider == "azure"
+    or Config.provider == "deepseek"
+    or Config.provider == "groq"
+  then
     call_openai_api_stream(question, code_lang, code_content, selected_content_content, on_chunk, on_complete)
   elseif Config.provider == "claude" then
     call_claude_api_stream(question, code_lang, code_content, selected_content_content, on_chunk, on_complete)
diff --git a/lua/avante/config.lua b/lua/avante/config.lua
index 325dde9..53c65e0 100644
--- a/lua/avante/config.lua
+++ b/lua/avante/config.lua
@@ -6,8 +6,8 @@ local M = {}
 
 ---@class avante.Config
 M.defaults = {
-  ---@alias Provider "openai" | "claude" | "azure" | "deepseek"
-  provider = "claude", -- "claude" or "openai" or "azure" or "deepseek"
+  ---@alias Provider "openai" | "claude" | "azure" | "deepseek" | "groq"
+  provider = "claude", -- "claude" or "openai" or "azure" or "deepseek" or "groq"
   openai = {
     endpoint = "https://api.openai.com",
     model = "gpt-4o",
@@ -33,6 +33,12 @@ M.defaults = {
     temperature = 0,
     max_tokens = 4096,
   },
+  groq = {
+    endpoint = "https://api.groq.com",
+    model = "llama-3.1-70b-versatile",
+    temperature = 0,
+    max_tokens = 4096,
+  },
   behaviour = {
     auto_apply_diff_after_generation = false, -- Whether to automatically apply diff after LLM response.
   },