From 82e5a710c315e7bd11f053d0923c567395a1466c Mon Sep 17 00:00:00 2001
From: UnCLAS-Prommer
Date: Thu, 28 Aug 2025 23:44:14 +0800
Subject: [PATCH] Set action's reply_message to a data model, maintain typing,
 and improve stability
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/chat/planner_actions/action_manager.py |  2 +-
 src/chat/utils/utils_image.py              |  3 +--
 src/llm_models/utils_model.py              | 23 +++++++++++------------
 src/person_info/person_info.py             |  2 +-
 src/plugin_system/__init__.py              | 17 +++++++++++++++++
 src/plugin_system/base/base_action.py      | 13 +++++--------
 6 files changed, 36 insertions(+), 24 deletions(-)

diff --git a/src/chat/planner_actions/action_manager.py b/src/chat/planner_actions/action_manager.py
index b4587474..1de033bf 100644
--- a/src/chat/planner_actions/action_manager.py
+++ b/src/chat/planner_actions/action_manager.py
@@ -84,7 +84,7 @@ class ActionManager:
                 log_prefix=log_prefix,
                 shutting_down=shutting_down,
                 plugin_config=plugin_config,
-                action_message=action_message.flatten() if action_message else None,
+                action_message=action_message,
             )
 
             logger.debug(f"创建Action实例成功: {action_name}")
diff --git a/src/chat/utils/utils_image.py b/src/chat/utils/utils_image.py
index 2bec09be..3c9c51e9 100644
--- a/src/chat/utils/utils_image.py
+++ b/src/chat/utils/utils_image.py
@@ -4,7 +4,6 @@ import time
 import hashlib
 import uuid
 import io
-import asyncio
 import numpy as np
 
 from typing import Optional, Tuple
@@ -177,7 +176,7 @@ class ImageManager:
                     emotion_prompt, temperature=0.3, max_tokens=50
                 )
 
-                if emotion_result is None:
+                if not emotion_result:
                     logger.warning("LLM未能生成情感标签,使用详细描述的前几个词")
                     # 降级处理:从详细描述中提取关键词
                     import jieba
diff --git a/src/llm_models/utils_model.py b/src/llm_models/utils_model.py
index 7ab76969..529c52b0 100644
--- a/src/llm_models/utils_model.py
+++ b/src/llm_models/utils_model.py
@@ -156,19 +156,19 @@ class LLMRequest:
         """
         # 请求体构建
         start_time = time.time()
-        
+
         message_builder = MessageBuilder()
         message_builder.add_text_content(prompt)
         messages = [message_builder.build()]
-        
+
         tool_built = self._build_tool_options(tools)
-        
+
         # 模型选择
         model_info, api_provider, client = self._select_model()
-        
+
         # 请求并处理返回值
         logger.debug(f"LLM选择耗时: {model_info.name} {time.time() - start_time}")
-        
+
         response = await self._execute_request(
             api_provider=api_provider,
             client=client,
@@ -179,8 +179,7 @@ class LLMRequest:
             max_tokens=max_tokens,
             tool_options=tool_built,
         )
-
-        
+
         content = response.content
         reasoning_content = response.reasoning_content or ""
         tool_calls = response.tool_calls
@@ -188,7 +187,7 @@ class LLMRequest:
         if not reasoning_content and content:
             content, extracted_reasoning = self._extract_reasoning(content)
             reasoning_content = extracted_reasoning
-        
+
         if usage := response.usage:
             llm_usage_recorder.record_usage_to_database(
                 model_info=model_info,
@@ -199,7 +198,7 @@ class LLMRequest:
                 time_cost=time.time() - start_time,
             )
 
-        return content, (reasoning_content, model_info.name, tool_calls)
+        return content or "", (reasoning_content, model_info.name, tool_calls)
 
     async def get_embedding(self, embedding_input: str) -> Tuple[List[float], str]:
         """获取嵌入向量
@@ -248,11 +247,11 @@ class LLMRequest:
         )
         model_info = model_config.get_model_info(least_used_model_name)
         api_provider = model_config.get_provider(model_info.api_provider)
-        
+
         # 对于嵌入任务,强制创建新的客户端实例以避免事件循环问题
-        force_new_client = (self.request_type == "embedding")
+        force_new_client = self.request_type == "embedding"
         client = client_registry.get_client_class_instance(api_provider, force_new=force_new_client)
-        
+
         logger.debug(f"选择请求模型: {model_info.name}")
         total_tokens, penalty, usage_penalty = self.model_usage[model_info.name]
         self.model_usage[model_info.name] = (total_tokens, penalty, usage_penalty + 1)  # 增加使用惩罚值防止连续使用
diff --git a/src/person_info/person_info.py b/src/person_info/person_info.py
index 3b4c1af6..584af8b8 100644
--- a/src/person_info/person_info.py
+++ b/src/person_info/person_info.py
@@ -241,7 +241,7 @@ class Person:
         self.name_reason: Optional[str] = None
         self.know_times = 0
         self.know_since = None
-        self.last_know = None
+        self.last_know: Optional[float] = None
         self.memory_points = []
 
         # 初始化性格特征相关字段
diff --git a/src/plugin_system/__init__.py b/src/plugin_system/__init__.py
index 45b8de9b..535b25d4 100644
--- a/src/plugin_system/__init__.py
+++ b/src/plugin_system/__init__.py
@@ -53,6 +53,15 @@ from .apis import (
     get_logger,
 )
 
+from src.common.data_models.database_data_model import (
+    DatabaseMessages,
+    DatabaseUserInfo,
+    DatabaseGroupInfo,
+    DatabaseChatInfo,
+)
+from src.common.data_models.info_data_model import TargetPersonInfo, ActionPlannerInfo
+from src.common.data_models.llm_data_model import LLMGenerationDataModel
+
 __version__ = "2.0.0"
 
 
@@ -103,4 +112,12 @@ __all__ = [
     # "ManifestGenerator",
     # "validate_plugin_manifest",
     # "generate_plugin_manifest",
+    # 数据模型
+    "DatabaseMessages",
+    "DatabaseUserInfo",
+    "DatabaseGroupInfo",
+    "DatabaseChatInfo",
+    "TargetPersonInfo",
+    "ActionPlannerInfo",
+    "LLMGenerationDataModel"
 ]
diff --git a/src/plugin_system/base/base_action.py b/src/plugin_system/base/base_action.py
index cd686edb..b6882d85 100644
--- a/src/plugin_system/base/base_action.py
+++ b/src/plugin_system/base/base_action.py
@@ -39,7 +39,7 @@ class BaseAction(ABC):
         chat_stream: ChatStream,
         log_prefix: str = "",
         plugin_config: Optional[dict] = None,
-        action_message: Optional[dict] = None,
+        action_message: Optional["DatabaseMessages"] = None,
         **kwargs,
     ):
         # sourcery skip: hoist-similar-statement-from-if, merge-else-if-into-elif, move-assign-in-block, swap-if-else-branches, swap-nested-ifs
@@ -114,16 +114,13 @@ class BaseAction(ABC):
 
         if self.action_message:
             self.has_action_message = True
-        else:
-            self.action_message = {}
-
         if self.has_action_message:
             if self.action_name != "no_action":
-                self.group_id = str(self.action_message.get("chat_info_group_id", None))
-                self.group_name = self.action_message.get("chat_info_group_name", None)
+                self.group_id = str(self.action_message.chat_info.group_info.group_id if self.action_message.chat_info.group_info else None)
+                self.group_name = self.action_message.chat_info.group_info.group_name if self.action_message.chat_info.group_info else None
 
-                self.user_id = str(self.action_message.get("user_id", None))
-                self.user_nickname = self.action_message.get("user_nickname", None)
+                self.user_id = str(self.action_message.user_info.user_id)
+                self.user_nickname = self.action_message.user_info.user_nickname
                 if self.group_id:
                     self.is_group = True
                     self.target_id = self.group_id
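
Usage note: after this patch a plugin action receives action_message as a typed
DatabaseMessages object (or None) rather than a flattened dict, so downstream code
reads attributes instead of calling .get(). Below is a minimal sketch of that access
pattern, assuming only what the patch shows: the DatabaseMessages re-export from
src.plugin_system and the attribute paths user_info.user_id, user_info.user_nickname,
chat_info.group_info.group_id and chat_info.group_info.group_name. The helper name
describe_sender is hypothetical and not part of the patched code.

    from typing import Optional

    from src.plugin_system import DatabaseMessages


    def describe_sender(message: Optional[DatabaseMessages]) -> str:
        """Summarize who sent a message using the typed fields this patch switches to."""
        if message is None:
            # action_message may now be None; the old code substituted an empty dict
            return "unknown sender"
        sender = message.user_info.user_nickname or str(message.user_info.user_id)
        group_info = message.chat_info.group_info
        if group_info:
            # group_info is None for private chats, mirroring the guard in base_action.py
            return f"{sender} in group {group_info.group_name} ({group_info.group_id})"
        return f"{sender} (private chat)"

This mirrors the null-guarded attribute access used in base_action.py after the patch.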