From 3900684ffd94225572074f611beb262b9d4a9b4f Mon Sep 17 00:00:00 2001
From: SengokuCola <1026294844@qq.com>
Date: Fri, 12 Sep 2025 13:46:10 +0800
Subject: [PATCH] =?UTF-8?q?fix:=E8=AF=86=E5=9B=BEtoken=E9=99=90=E5=88=B6?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/chat/utils/utils_image.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/chat/utils/utils_image.py b/src/chat/utils/utils_image.py
index 0a4e5f36..27384d28 100644
--- a/src/chat/utils/utils_image.py
+++ b/src/chat/utils/utils_image.py
@@ -153,7 +153,7 @@ class ImageManager:
                 "这是一个表情包,请详细描述一下表情包所表达的情感和内容,描述细节,从互联网梗,meme的角度去分析"
             )
             detailed_description, _ = await self.vlm.generate_response_for_image(
-                vlm_prompt, image_base64, image_format, temperature=0.4, max_tokens=300
+                vlm_prompt, image_base64, image_format, temperature=0.4
             )
 
             if detailed_description is None:
@@ -175,7 +175,7 @@ class ImageManager:
             # 使用较低温度确保输出稳定
             emotion_llm = LLMRequest(model_set=model_config.model_task_config.utils, request_type="emoji")
             emotion_result, _ = await emotion_llm.generate_response_async(
-                emotion_prompt, temperature=0.3, max_tokens=50
+                emotion_prompt, temperature=0.3
             )
 
             if not emotion_result: