From: NameWeb
Date: Fri, 9 May 2025 08:29:05 +0000 (+0200)
Subject: Remove "thinking" and fix errorlog
X-Git-Tag: 3.12.0~24^2~1
X-Git-Url: http://git.ipfire.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=8f7c7f22885051f4fa20bdfb067b96a726f9db27;p=thirdparty%2Frspamd.git

Remove "thinking" and fix errorlog

Some models used by Ollama will include "thinking" before the actual
response. We now remove this. Also fixed error logging, which was
probably intended to also log "first_message".
---

diff --git a/src/plugins/lua/gpt.lua b/src/plugins/lua/gpt.lua
index 5d1cf5e067..19c3643562 100644
--- a/src/plugins/lua/gpt.lua
+++ b/src/plugins/lua/gpt.lua
@@ -359,10 +359,19 @@ local function default_openai_plain_conversion(task, input)
     return spam_score, reason, categories
   end
 
-  rspamd_logger.errx(task, 'cannot parse plain gpt reply: %s (all: %s)', lines[1])
+  rspamd_logger.errx(task, 'cannot parse plain gpt reply: %s (all: %s)', lines[1], first_message)
   return
 end
 
+-- Helper function to remove <think>...</think> and trim leading newlines
+local function clean_gpt_response(text)
+  -- Remove <think>...</think> including multiline
+  text = text:gsub("<think>.-</think>", "")
+  -- Trim leading whitespace and newlines
+  text = text:gsub("^%s*\n*", "")
+  return text
+end
+
 local function default_ollama_plain_conversion(task, input)
   local parser = ucl.parser()
   local res, err = parser:parse_string(input)
@@ -387,6 +396,10 @@ local function default_ollama_plain_conversion(task, input)
     rspamd_logger.errx(task, 'no content in the first message')
     return
   end
+
+  -- Clean message
+  first_message = clean_gpt_response(first_message)
+
   local lines = lua_util.str_split(first_message, '\n')
   local first_line = clean_reply_line(lines[1])
   local spam_score = tonumber(first_line)
@@ -397,7 +410,7 @@ local function default_ollama_plain_conversion(task, input)
     return spam_score, reason, categories
   end
 
-  rspamd_logger.errx(task, 'cannot parse plain gpt reply: %s', lines[1])
+  rspamd_logger.errx(task, 'cannot parse plain gpt reply: %s (all: %s)', lines[1], first_message)
   return
 end
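
For reference, a minimal standalone Lua sketch of how the new clean_gpt_response helper is expected to behave. The sample reply text and the printed output below are illustrative assumptions only; they are not part of the patch and do not come from any real model output.

-- Standalone sketch (not part of the patch): strips a reasoning block emitted
-- before the real answer, so that the first remaining line can be parsed as a
-- number by the caller (spam_score = tonumber(first_line)).
local function clean_gpt_response(text)
  -- Drop everything between <think> and </think>, including newlines
  -- (in Lua patterns '.' also matches newlines and '-' makes the match lazy)
  text = text:gsub("<think>.-</think>", "")
  -- Trim leading whitespace and newlines left behind by the removed block
  text = text:gsub("^%s*\n*", "")
  return text
end

-- Hypothetical Ollama reply with a "thinking" preamble (illustrative only)
local sample = "<think>\nWeighing the promotional wording...\n</think>\n\n0.85\nUnsolicited promotional content\nadvertising"
print(clean_gpt_response(sample))
-- Prints:
-- 0.85
-- Unsolicited promotional content
-- advertising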