feat: enable streaming for o1 models (#896)
As of a few days ago, o1 models support streaming responses. Please see: https://community.openai.com/t/openai-o1-streaming-now-available-api-access-for-tiers-1-5/1025430
This commit is contained in:
parent c8e688a0ac
commit e60ccd2db4
@@ -147,7 +147,6 @@ M.parse_curl_args = function(provider, code_opts)
       -- NOTE: When using "o1" set the supported parameters only
       local stream = true
       if base.model and string.find(base.model, "o1") then
-        stream = false
         body_opts.max_tokens = nil
         body_opts.temperature = 1
       end
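For context, a minimal, self-contained sketch of the o1 handling after this change. The `base` and `body_opts` tables here use assumed example values, not the plugin's actual surrounding code; only the branch body mirrors the hunk above.

```lua
-- Assumed example inputs (not the plugin's real tables).
local base = { model = "o1-preview" }
local body_opts = { max_tokens = 4096, temperature = 0.7 }

local stream = true
if base.model and string.find(base.model, "o1") then
  -- With this commit, streaming is no longer disabled for o1 models;
  -- only the parameters o1 does not support are adjusted.
  body_opts.max_tokens = nil
  body_opts.temperature = 1
end

print(stream)                 --> true
print(body_opts.temperature)  --> 1
```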