From: Vsevolod Stakhov Date: Mon, 22 Jul 2024 12:06:51 +0000 (+0100) Subject: [Minor] Remove top_p, reduce temperature to 0 X-Git-Tag: 3.9.1~1^2~4 X-Git-Url: http://git.ipfire.org/gitweb.cgi?a=commitdiff_plain;h=d019b0dfc11ca01c19a2889db0605ff1a776562b;p=thirdparty%2Frspamd.git [Minor] Remove top_p, reduce temperature to 0 --- diff --git a/conf/modules.d/gpt.conf b/conf/modules.d/gpt.conf index 7a2e11d400..eac9952e5c 100644 --- a/conf/modules.d/gpt.conf +++ b/conf/modules.d/gpt.conf @@ -22,9 +22,7 @@ gpt { # Maximum tokens to generate max_tokens = 1000; # Temperature for sampling - temperature = 0.7; - # Top p for sampling - top_p = 0.9; + temperature = 0.0; # Timeout for requests timeout = 10s; # Prompt for the model (use default if not set) diff --git a/src/plugins/lua/gpt.lua b/src/plugins/lua/gpt.lua index 6adbce3bf5..046fe5dd78 100644 --- a/src/plugins/lua/gpt.lua +++ b/src/plugins/lua/gpt.lua @@ -31,9 +31,7 @@ gpt { # Maximum tokens to generate max_tokens = 1000; # Temperature for sampling - temperature = 0.7; - # Top p for sampling - top_p = 0.9; + temperature = 0.0; # Timeout for requests timeout = 10s; # Prompt for the model (use default if not set) @@ -73,8 +71,7 @@ local settings = { api_key = nil, model = 'gpt-3.5-turbo', max_tokens = 1000, - temperature = 0.7, - top_p = 0.9, + temperature = 0.0, timeout = 10, prompt = nil, condition = nil, @@ -276,7 +273,6 @@ local function openai_gpt_check(task) model = settings.model, max_tokens = settings.max_tokens, temperature = settings.temperature, - top_p = settings.top_p, messages = { { role = 'system',