From 1b31702b53c61d1fc6b1941ad766b2506ae180a4 Mon Sep 17 00:00:00 2001 From: SengokuCola <1026294844@qq.com> Date: Wed, 17 Sep 2025 19:37:27 +0800 Subject: [PATCH] =?UTF-8?q?=E4=BF=AE=E5=A4=8D=E7=A7=81=E8=81=8A=E9=97=AE?= =?UTF-8?q?=E9=A2=98?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ...efault_generator.py => group_generator.py} | 0 src/chat/replyer/private_generator.py | 146 ++---------------- src/chat/replyer/prompt/replyer_prompt.py | 23 ++- src/chat/replyer/replyer_manager.py | 2 +- src/plugin_system/apis/generator_api.py | 2 +- 5 files changed, 34 insertions(+), 139 deletions(-) rename src/chat/replyer/{default_generator.py => group_generator.py} (100%) diff --git a/src/chat/replyer/default_generator.py b/src/chat/replyer/group_generator.py similarity index 100% rename from src/chat/replyer/default_generator.py rename to src/chat/replyer/group_generator.py diff --git a/src/chat/replyer/private_generator.py b/src/chat/replyer/private_generator.py index 7b81a28c..0faff4b0 100644 --- a/src/chat/replyer/private_generator.py +++ b/src/chat/replyer/private_generator.py @@ -434,129 +434,6 @@ class PrivateReplyer: duration = end_time - start_time return name, result, duration - def build_s4u_chat_history_prompts( - self, message_list_before_now: List[DatabaseMessages], target_user_id: str, sender: str - ) -> Tuple[str, str]: - """ - 构建 s4u 风格的分离对话 prompt - - Args: - message_list_before_now: 历史消息列表 - target_user_id: 目标用户ID(当前对话对象) - - Returns: - Tuple[str, str]: (核心对话prompt, 背景对话prompt) - """ - core_dialogue_list: List[DatabaseMessages] = [] - bot_id = str(global_config.bot.qq_account) - - # 过滤消息:分离bot和目标用户的对话 vs 其他用户的对话 - for msg in message_list_before_now: - try: - msg_user_id = str(msg.user_info.user_id) - reply_to = msg.reply_to - _platform, reply_to_user_id = self._parse_reply_target(reply_to) - if (msg_user_id == bot_id and reply_to_user_id == target_user_id) or msg_user_id == target_user_id: - # bot 和目标用户的对话 - core_dialogue_list.append(msg) - except Exception as e: - logger.error(f"处理消息记录时出错: {msg}, 错误: {e}") - - # 构建核心对话 prompt - core_dialogue_prompt = "" - if core_dialogue_list: - # 检查最新五条消息中是否包含bot自己说的消息 - latest_5_messages = core_dialogue_list[-5:] if len(core_dialogue_list) >= 5 else core_dialogue_list - has_bot_message = any(str(msg.user_info.user_id) == bot_id for msg in latest_5_messages) - - # logger.info(f"最新五条消息:{latest_5_messages}") - # logger.info(f"最新五条消息中是否包含bot自己说的消息:{has_bot_message}") - - # 如果最新五条消息中不包含bot的消息,则返回空字符串 - if not has_bot_message: - core_dialogue_prompt = "" - else: - core_dialogue_list = core_dialogue_list[ - -int(global_config.chat.max_context_size * 0.6) : - ] # 限制消息数量 - - core_dialogue_prompt_str = build_readable_messages( - core_dialogue_list, - replace_bot_name=True, - timestamp_mode="normal_no_YMD", - read_mark=0.0, - truncate=True, - show_actions=True, - ) - core_dialogue_prompt = f"""-------------------------------- -这是你和{sender}的对话,你们正在交流中: -{core_dialogue_prompt_str} --------------------------------- -""" - - - # 构建背景对话 prompt - all_dialogue_prompt = "" - if message_list_before_now: - latest_25_msgs = message_list_before_now[-int(global_config.chat.max_context_size) :] - all_dialogue_prompt_str = build_readable_messages( - latest_25_msgs, - replace_bot_name=True, - timestamp_mode="normal_no_YMD", - truncate=True, - ) - if core_dialogue_prompt: - all_dialogue_prompt = f"所有用户的发言:\n{all_dialogue_prompt_str}" - else: - all_dialogue_prompt = f"{all_dialogue_prompt_str}" - - return 
core_dialogue_prompt, all_dialogue_prompt - - def build_mai_think_context( - self, - chat_id: str, - memory_block: str, - relation_info: str, - time_block: str, - chat_target_1: str, - chat_target_2: str, - mood_prompt: str, - identity_block: str, - sender: str, - target: str, - chat_info: str, - ) -> Any: - """构建 mai_think 上下文信息 - - Args: - chat_id: 聊天ID - memory_block: 记忆块内容 - relation_info: 关系信息 - time_block: 时间块内容 - chat_target_1: 聊天目标1 - chat_target_2: 聊天目标2 - mood_prompt: 情绪提示 - identity_block: 身份块内容 - sender: 发送者名称 - target: 目标消息内容 - chat_info: 聊天信息 - - Returns: - Any: mai_think 实例 - """ - mai_think = mai_thinking_manager.get_mai_think(chat_id) - mai_think.memory_block = memory_block - mai_think.relation_info_block = relation_info - mai_think.time_block = time_block - mai_think.chat_target = chat_target_1 - mai_think.chat_target_2 = chat_target_2 - mai_think.chat_info = chat_info - mai_think.mood_state = mood_prompt - mai_think.identity = identity_block - mai_think.sender = sender - mai_think.target = target - return mai_think - async def build_actions_prompt( self, available_actions: Dict[str, ActionInfo], chosen_actions_info: Optional[List[ActionPlannerInfo]] = None ) -> str: @@ -630,7 +507,6 @@ class PrivateReplyer: available_actions = {} chat_stream = self.chat_stream chat_id = chat_stream.stream_id - is_group_chat = bool(chat_stream.group_info) platform = chat_stream.platform user_id = "用户ID" @@ -656,7 +532,15 @@ class PrivateReplyer: message_list_before_now_long = get_raw_msg_before_timestamp_with_chat( chat_id=chat_id, timestamp=time.time(), - limit=global_config.chat.max_context_size * 1, + limit=global_config.chat.max_context_size, + ) + + dialogue_prompt = build_readable_messages( + message_list_before_now_long, + replace_bot_name=True, + timestamp_mode="relative", + read_mark=0.0, + show_actions=True, ) message_list_before_short = get_raw_msg_before_timestamp_with_chat( @@ -762,12 +646,6 @@ class PrivateReplyer: f"现在对方说的:{target}。引起了你的注意" ) - - # 构建分离的对话 prompt - core_dialogue_prompt, background_dialogue_prompt = self.build_s4u_chat_history_prompts( - message_list_before_now_long, user_id, sender - ) - if global_config.bot.qq_account == user_id and platform == global_config.bot.platform: return await global_prompt_manager.format_prompt( "private_replyer_self_prompt", @@ -780,7 +658,7 @@ class PrivateReplyer: identity=personality_prompt, action_descriptions=actions_info, mood_state=mood_prompt, - background_dialogue_prompt=background_dialogue_prompt, + dialogue_prompt=dialogue_prompt, time_block=time_block, target=target, reason=reply_reason, @@ -791,7 +669,7 @@ class PrivateReplyer: ), selected_expressions else: return await global_prompt_manager.format_prompt( - "replyer_prompt", + "private_replyer_prompt", expression_habits_block=expression_habits_block, tool_info_block=tool_info, knowledge_prompt=prompt_info, @@ -802,7 +680,7 @@ class PrivateReplyer: action_descriptions=actions_info, sender_name=sender, mood_state=mood_prompt, - dialogue_prompt=background_dialogue_prompt, + dialogue_prompt=dialogue_prompt, time_block=time_block, sender = sender, reply_target_block=reply_target_block, diff --git a/src/chat/replyer/prompt/replyer_prompt.py b/src/chat/replyer/prompt/replyer_prompt.py index 8ce7a0b5..2febc084 100644 --- a/src/chat/replyer/prompt/replyer_prompt.py +++ b/src/chat/replyer/prompt/replyer_prompt.py @@ -36,9 +36,6 @@ def init_replyer_prompt(): """{knowledge_prompt}{relation_info_block}{tool_info_block}{extra_info_block} {expression_habits_block} - - - 
你正在qq群里聊天,下面是群里正在聊的内容: {time_block} {background_dialogue_prompt} @@ -72,4 +69,24 @@ def init_replyer_prompt(): 请注意不要输出多余内容(包括前后缀,冒号和引号,括号,表情等),只输出回复内容。 {moderation_prompt}不要输出多余内容(包括前后缀,冒号和引号,括号,表情包,at或 @等 )。""", "private_replyer_prompt", + ) + + + Prompt( + """{knowledge_prompt}{relation_info_block}{tool_info_block}{extra_info_block} +{expression_habits_block} + +你正在和{sender_name}聊天,这是你们之前聊的内容: +{time_block} +{dialogue_prompt} + +你现在想补充说明你刚刚自己的发言内容:{target},原因是{reason} +请你根据聊天内容,组织一条新回复。注意,{target} 是刚刚你自己的发言,你要在这基础上进一步发言,请按照你自己的角度来继续进行回复。注意保持上下文的连贯性。 +{identity} +尽量简短一些。{keywords_reaction_prompt}请注意把握聊天内容,不要回复的太有条理,可以有个性。 +{reply_style} +请注意不要输出多余内容(包括前后缀,冒号和引号,括号,表情等),只输出回复内容。 +{moderation_prompt}不要输出多余内容(包括前后缀,冒号和引号,括号,表情包,at或 @等 )。 +""", + "private_replyer_self_prompt", ) \ No newline at end of file diff --git a/src/chat/replyer/replyer_manager.py b/src/chat/replyer/replyer_manager.py index b5e7350d..c7afddc9 100644 --- a/src/chat/replyer/replyer_manager.py +++ b/src/chat/replyer/replyer_manager.py @@ -2,7 +2,7 @@ from typing import Dict, Optional from src.common.logger import get_logger from src.chat.message_receive.chat_stream import ChatStream, get_chat_manager -from src.chat.replyer.default_generator import DefaultReplyer +from src.chat.replyer.group_generator import DefaultReplyer from src.chat.replyer.private_generator import PrivateReplyer logger = get_logger("ReplyerManager") diff --git a/src/plugin_system/apis/generator_api.py b/src/plugin_system/apis/generator_api.py index 39fb5727..335cc18f 100644 --- a/src/plugin_system/apis/generator_api.py +++ b/src/plugin_system/apis/generator_api.py @@ -13,7 +13,7 @@ from typing import Tuple, Any, Dict, List, Optional, TYPE_CHECKING from rich.traceback import install from src.common.logger import get_logger from src.common.data_models.message_data_model import ReplySetModel -from src.chat.replyer.default_generator import DefaultReplyer +from src.chat.replyer.group_generator import DefaultReplyer from src.chat.replyer.private_generator import PrivateReplyer from src.chat.message_receive.chat_stream import ChatStream from src.chat.utils.utils import process_llm_response
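
Note on the group/private split introduced by this patch: default_generator.py is renamed to group_generator.py, private chats go through PrivateReplyer with a single flat dialogue_prompt built by build_readable_messages, and a separate private_replyer_self_prompt template is added for follow-ups to the bot's own message. As a minimal sketch of the dispatch this rename implies — assuming the manager keys the choice on whether the chat stream carries group info — the stub classes below stand in for the real ones in src/chat/replyer/; this is an illustration, not the repository's actual ReplyerManager code, which may cache instances per stream_id and take extra constructor arguments.

    # Sketch only: hypothetical stand-ins for the real classes in src/chat/replyer/.
    from dataclasses import dataclass
    from typing import Optional, Union


    @dataclass
    class ChatStream:               # stand-in for src.chat.message_receive.chat_stream.ChatStream
        stream_id: str
        platform: str
        group_info: Optional[dict]  # None for private (one-on-one) chats


    class DefaultReplyer:           # stand-in for src.chat.replyer.group_generator.DefaultReplyer
        def __init__(self, chat_stream: ChatStream):
            self.chat_stream = chat_stream


    class PrivateReplyer:           # stand-in for src.chat.replyer.private_generator.PrivateReplyer
        def __init__(self, chat_stream: ChatStream):
            self.chat_stream = chat_stream


    def get_replyer(chat_stream: ChatStream) -> Union[DefaultReplyer, PrivateReplyer]:
        """Pick the group replyer when the stream has group info, else the private one."""
        if chat_stream.group_info:
            return DefaultReplyer(chat_stream)
        return PrivateReplyer(chat_stream)


    if __name__ == "__main__":
        group_stream = ChatStream(stream_id="g1", platform="qq", group_info={"group_id": "123"})
        private_stream = ChatStream(stream_id="p1", platform="qq", group_info=None)
        print(type(get_replyer(group_stream)).__name__)    # DefaultReplyer
        print(type(get_replyer(private_stream)).__name__)  # PrivateReplyer

Keeping the dispatch in one place means callers such as generator_api only ask for "a replyer" and never need to know which prompt template (replyer_prompt vs. private_replyer_prompt / private_replyer_self_prompt) ends up being used.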