From bf71fe739dcb9bc21a132c0c3c7473a7a1bde427 Mon Sep 17 00:00:00 2001
From: SengokuCola <1026294844@qq.com>
Date: Thu, 20 Nov 2025 01:28:16 +0800
Subject: [PATCH] fix: emoji uses the utils model instead of replyer
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/plugins/built_in/emoji_plugin/emoji.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/src/plugins/built_in/emoji_plugin/emoji.py b/src/plugins/built_in/emoji_plugin/emoji.py
index 7078a817..3783886a 100644
--- a/src/plugins/built_in/emoji_plugin/emoji.py
+++ b/src/plugins/built_in/emoji_plugin/emoji.py
@@ -102,13 +102,13 @@ class EmojiAction(BaseAction):
 
         # 5. 调用LLM
         models = llm_api.get_available_models()
-        chat_model_config = models.get("replyer")  # 使用字典访问方式
+        chat_model_config = models.get("utils")  # 使用字典访问方式
         if not chat_model_config:
-            logger.error(f"{self.log_prefix} 未找到'replyer'模型配置,无法调用LLM")
-            return False, "未找到'replyer'模型配置"
+            logger.error(f"{self.log_prefix} 未找到'utils'模型配置,无法调用LLM")
+            return False, "未找到'utils'模型配置"
 
         success, chosen_emotion, _, _ = await llm_api.generate_with_model(
-            prompt, model_config=chat_model_config, request_type="emoji"
+            prompt, model_config=chat_model_config, request_type="emoji.select"
         )
 
         if not success:
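
For readers who only want the end state, the sketch below mirrors the selection flow as it reads after this patch, with the Chinese comments and log strings rendered in English. The two llm_api calls, get_available_models() and the awaited generate_with_model(...), are taken directly from the hunk; everything else, including the _FakeLLMApi stand-in, the choose_emotion() wrapper, and the behaviour after "if not success:", is an illustrative assumption rather than code from emoji.py.

import asyncio
import logging

logger = logging.getLogger("emoji_plugin_sketch")

class _FakeLLMApi:
    """Illustrative stand-in for the project's llm_api module.

    Only the two calls used in the hunk are mirrored; the return shapes
    follow how the plugin unpacks them, not the real implementation.
    """

    def get_available_models(self):
        # The patch now looks up "utils" instead of "replyer" in this dict.
        return {"utils": {"name": "utils-model"}}

    async def generate_with_model(self, prompt, model_config, request_type):
        # The plugin unpacks a 4-tuple: (success, chosen_emotion, _, _).
        return True, "happy", None, None

llm_api = _FakeLLMApi()

async def choose_emotion(prompt: str, log_prefix: str = "[EmojiAction]"):
    # 5. Call the LLM  (original comment: "# 5. 调用LLM")
    models = llm_api.get_available_models()
    chat_model_config = models.get("utils")  # dict-style lookup, per the patch
    if not chat_model_config:
        # Original log: "未找到'utils'模型配置,无法调用LLM"
        logger.error(f"{log_prefix} 'utils' model config not found; cannot call LLM")
        return False, "'utils' model config not found"

    success, chosen_emotion, _, _ = await llm_api.generate_with_model(
        prompt, model_config=chat_model_config, request_type="emoji.select"
    )
    if not success:
        # The hunk ends at "if not success:"; this branch is an assumption.
        return False, "LLM call failed"
    return True, chosen_emotion

if __name__ == "__main__":
    print(asyncio.run(choose_emotion("pick an emotion for: lol")))

Note that models.get("utils") keeps the same dict-style access as before; only the lookup key and the request_type string change in this patch.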