From: hunter-nl
Date: Thu, 14 Aug 2025 12:24:36 +0000 (+0200)
Subject: Update gpt.lua to support newer models without temperature attribute
X-Git-Tag: 3.13.0~29^2~8
X-Git-Url: http://git.ipfire.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=5efcf514b845cc031c649eef77348e0505eb68b5;p=thirdparty%2Frspamd.git

Update gpt.lua to support newer models without temperature attribute

Newer models do not support the temperature attribute anymore.
---

diff --git a/src/plugins/lua/gpt.lua b/src/plugins/lua/gpt.lua
index 968127a287..5a8bb10abb 100644
--- a/src/plugins/lua/gpt.lua
+++ b/src/plugins/lua/gpt.lua
@@ -702,9 +702,22 @@ local function openai_check(task, content, sel_part)
     return 'max_tokens'
   end
 
+  -- Only send temperature if model supports it
+  local function supports_temperature(model)
+    if not model then return true end
+    -- Disallow for reasoning models and GPT-5 family
+    if model:match('^gpt%-5') or
+        model:match('^o%d') or
+        model:match('^o%d%-mini') or
+        model:match('^gpt%-4%.1') or
+        model:match('reasoning') then
+      return false
+    end
+    return true
+  end
+
   local body = {
     model = settings.model,
-    temperature = settings.temperature,
     messages = {
       {
         role = 'system',
@@ -732,7 +745,12 @@ local function openai_check(task, content, sel_part)
   -- Set the correct token limit field
   local token_field = get_max_tokens_field(settings.model)
   body[token_field] = settings.max_tokens
-
+
+  -- Set the temperature field if model supports it
+  if supports_temperature(settings.model) then
+    body.temperature = settings.temperature
+  end
+
   -- Conditionally add response_format
   if settings.include_response_format then
     body.response_format = { type = "json_object" }
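
For reference, below is a standalone sketch of the helper introduced by this patch, copied out of the diff so it can be run on its own; the sample model names and the print loop are illustrative assumptions, not part of the patch.

-- Illustrative, standalone copy of the new helper; not part of the patch
local function supports_temperature(model)
  if not model then return true end
  -- Reasoning models and the GPT-5 family do not accept an explicit temperature
  if model:match('^gpt%-5') or
      model:match('^o%d') or
      model:match('^o%d%-mini') or
      model:match('^gpt%-4%.1') or
      model:match('reasoning') then
    return false
  end
  return true
end

-- Sample model names chosen for illustration only
for _, m in ipairs({ 'gpt-4o-mini', 'gpt-4.1-mini', 'o3-mini', 'gpt-5' }) do
  print(m, supports_temperature(m))
end
-- gpt-4o-mini   true
-- gpt-4.1-mini  false
-- o3-mini       false
-- gpt-5         false

Note that '^o%d' already matches names such as 'o3-mini', so the extra '^o%d%-mini' pattern is redundant but harmless, and any model name that matches none of the patterns (or a nil model) keeps the previous behaviour of sending the configured temperature.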