feat: enable token counting (#1157)

yetone 2025-02-02 01:27:12 +08:00 committed by GitHub
parent 0a273c2f5d
commit d1286e7bfb
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
3 changed files with 12 additions and 13 deletions

README.md

@@ -246,6 +246,7 @@ _See [config.lua#L9](./lua/avante/config.lua) for the full config_
     auto_apply_diff_after_generation = false,
     support_paste_from_clipboard = false,
     minimize_diff = true, -- Whether to remove unchanged lines when applying a code block
+    enable_token_counting = true, -- Whether to enable token counting. Default to true.
   },
   mappings = {
     --- @class AvanteConflictMappings
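
With the flag documented in the README's behaviour block, a user who prefers the old hint (no token count) can opt out from their setup call. A minimal sketch; the wrapper around behaviour is illustrative rather than a complete avante.nvim configuration:

-- Disable the token counter in the input hint; everything else keeps its defaults.
require("avante").setup({
  behaviour = {
    enable_token_counting = false,
  },
})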

lua/avante/config.lua

@@ -127,6 +127,7 @@ M._defaults = {
   ---6. jump_to_result_buffer_on_finish = false, -- Whether to automatically jump to the result buffer after generation
   ---7. support_paste_from_clipboard : Whether to support pasting image from clipboard. This will be determined automatically based whether img-clip is available or not.
   ---8. minimize_diff : Whether to remove unchanged lines when applying a code block
+  ---9. enable_token_counting : Whether to enable token counting. Default to true.
   behaviour = {
     auto_focus_sidebar = true,
     auto_suggestions = false, -- Experimental stage
@@ -137,6 +138,7 @@ M._defaults = {
     jump_result_buffer_on_finish = false,
     support_paste_from_clipboard = false,
     minimize_diff = true,
+    enable_token_counting = true,
   },
   history = {
     max_tokens = 4096,
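
The default above only takes effect once user options are merged over M._defaults. A rough sketch of that pattern, assuming the usual vim.tbl_deep_extend-based setup (avante's actual Config.setup may differ in its details):

-- Illustrative merge inside Neovim: an omitted key falls back to the default (true),
-- while an explicit value in the user's opts overrides it.
local Config = { _defaults = { behaviour = { enable_token_counting = true } } }

function Config.setup(opts)
  Config.behaviour = vim.tbl_deep_extend("force", Config._defaults, opts or {}).behaviour
end

Config.setup({ behaviour = { enable_token_counting = false } })
print(Config.behaviour.enable_token_counting) -- false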

lua/avante/sidebar.lua

@@ -1724,7 +1724,6 @@ function Sidebar:create_input_container(opts)
   local transformed_response = ""
   local displayed_response = ""
   local current_path = ""
-  local prev_is_thinking = false
   local is_first_chunk = true
   local scroll = true
@@ -1758,10 +1757,8 @@ function Sidebar:create_input_container(opts)
       local selected_files = self.file_selector:get_selected_files_contents()
-      local transformed =
-        transform_result_content(selected_files, transformed_response .. chunk, current_path, prev_is_thinking)
+      local transformed = transform_result_content(selected_files, transformed_response .. chunk, current_path)
       transformed_response = transformed.content
-      prev_is_thinking = transformed.is_thinking
       if transformed.current_filepath and transformed.current_filepath ~= "" then
         current_path = transformed.current_filepath
       end
@@ -1980,17 +1977,16 @@ function Sidebar:create_input_container(opts)
   local function show_hint()
     close_hint() -- Close the existing hint window
-    local input_value = table.concat(api.nvim_buf_get_lines(self.input_container.bufnr, 0, -1, false), "\n")
-    local generate_prompts_options = get_generate_prompts_options(input_value)
-    local tokens = Llm.calculate_tokens(generate_prompts_options)
-    local hint_text = "Tokens: "
-      .. tostring(tokens)
-      .. "; "
-      .. (fn.mode() ~= "i" and Config.mappings.submit.normal or Config.mappings.submit.insert)
+    local hint_text = (fn.mode() ~= "i" and Config.mappings.submit.normal or Config.mappings.submit.insert)
       .. ": submit"
+    if Config.behaviour.enable_token_counting then
+      local input_value = table.concat(api.nvim_buf_get_lines(self.input_container.bufnr, 0, -1, false), "\n")
+      local generate_prompts_options = get_generate_prompts_options(input_value)
+      local tokens = Llm.calculate_tokens(generate_prompts_options)
+      hint_text = "Tokens: " .. tostring(tokens) .. "; " .. hint_text
+    end
     local buf = api.nvim_create_buf(false, true)
     api.nvim_buf_set_lines(buf, 0, -1, false, { hint_text })
     api.nvim_buf_add_highlight(buf, 0, "AvantePopupHint", 0, 0, -1)
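
Taken together, the new lines compute the submit hint unconditionally and pay for token counting only when the flag is on. Reconstructed from the + side of the hunk above; the remainder of show_hint is unchanged and omitted:

local function show_hint()
  close_hint() -- Close the existing hint window
  -- The submit keybinding is always shown.
  local hint_text = (fn.mode() ~= "i" and Config.mappings.submit.normal or Config.mappings.submit.insert)
    .. ": submit"
  -- The buffer read, prompt generation, and Llm.calculate_tokens call now run
  -- only when behaviour.enable_token_counting is true.
  if Config.behaviour.enable_token_counting then
    local input_value = table.concat(api.nvim_buf_get_lines(self.input_container.bufnr, 0, -1, false), "\n")
    local generate_prompts_options = get_generate_prompts_options(input_value)
    local tokens = Llm.calculate_tokens(generate_prompts_options)
    hint_text = "Tokens: " .. tostring(tokens) .. "; " .. hint_text
  end
  local buf = api.nvim_create_buf(false, true)
  api.nvim_buf_set_lines(buf, 0, -1, false, { hint_text })
  api.nvim_buf_add_highlight(buf, 0, "AvantePopupHint", 0, 0, -1)
  -- ...
end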