From 1a9bee542e5623add11ac48b6b396d3bdff99ebb Mon Sep 17 00:00:00 2001 From: SengokuCola <1026294844@qq.com> Date: Fri, 22 Aug 2025 17:22:33 +0800 Subject: [PATCH] =?UTF-8?q?ref=E5=88=86=E7=A6=BBplannerprompt=E4=B8=8E?= =?UTF-8?q?=E4=BA=BA=E6=A0=BC=E8=A7=A3=E8=80=A6?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../heart_flow/heartflow_message_processor.py | 11 +++++++---- src/chat/planner_actions/planner.py | 18 ++++++++++-------- src/chat/replyer/default_generator.py | 2 +- 3 files changed, 18 insertions(+), 13 deletions(-) diff --git a/src/chat/heart_flow/heartflow_message_processor.py b/src/chat/heart_flow/heartflow_message_processor.py index 8d0f4426..dc953102 100644 --- a/src/chat/heart_flow/heartflow_message_processor.py +++ b/src/chat/heart_flow/heartflow_message_processor.py @@ -78,8 +78,12 @@ async def _calculate_interest(message: MessageRecv) -> Tuple[float, bool, list[s interested_rate += base_interest if is_mentioned: - interest_increase_on_mention = 1 + interest_increase_on_mention = 2 interested_rate += interest_increase_on_mention + + + message.interest_value = interested_rate + message.is_mentioned = is_mentioned return interested_rate, is_mentioned, keywords @@ -110,9 +114,8 @@ class HeartFCMessageReceiver: chat = message.chat_stream # 2. 
兴趣度计算与更新 - interested_rate, is_mentioned, keywords = await _calculate_interest(message) - message.interest_value = interested_rate - message.is_mentioned = is_mentioned + interested_rate, _, keywords = await _calculate_interest(message) + await self.storage.store_message(message, chat) diff --git a/src/chat/planner_actions/planner.py b/src/chat/planner_actions/planner.py index 2cb2a469..6759c73a 100644 --- a/src/chat/planner_actions/planner.py +++ b/src/chat/planner_actions/planner.py @@ -33,7 +33,7 @@ def init_prompt(): Prompt( """ {time_block} -{identity_block} +{name_block} 你现在需要根据聊天内容,选择的合适的action来参与聊天。 请你根据以下行事风格来决定action: {plan_style} @@ -298,7 +298,7 @@ class ActionPlanner: actions_before_now = get_actions_by_timestamp_with_chat( chat_id=self.chat_id, - timestamp_start=time.time() - 3600, + timestamp_start=time.time() - 600, timestamp_end=time.time(), limit=5, ) @@ -306,8 +306,12 @@ class ActionPlanner: actions_before_now_block = build_readable_actions( actions=actions_before_now, ) + + if actions_before_now: + actions_before_now_block = f"你刚刚选择并执行过的action是:\n{actions_before_now_block}" + else: + actions_before_now_block = "" - actions_before_now_block = f"你刚刚选择并执行过的action是:\n{actions_before_now_block}" if refresh_time: self.last_obs_time_mark = time.time() @@ -322,8 +326,7 @@ class ActionPlanner: 动作:no_action 动作描述:不进行动作,等待合适的时机 - 当你刚刚发送了消息,没有人回复时,选择no_action -- 如果有别的动作(非回复)满足条件,可以不用no_action -- 当你一次发送了太多消息,为了避免打扰聊天节奏,选择no_action +- 当你一次发送了太多消息,为了避免过于烦人,可以不回复 { "action": "no_action", "reason":"不动作的原因" } @@ -378,8 +381,7 @@ class ActionPlanner: bot_nickname = f",也有人叫你{','.join(global_config.bot.alias_names)}" else: bot_nickname = "" - bot_core_personality = global_config.personality.personality_core - identity_block = f"你的名字是{bot_name}{bot_nickname},你{bot_core_personality}:" + name_block = f"你的名字是{bot_name}{bot_nickname},请注意哪些是你自己的发言。" planner_prompt_template = await global_prompt_manager.get_prompt_async("planner_prompt") prompt = planner_prompt_template.format( 
@@ -391,7 +393,7 @@ class ActionPlanner: mentioned_bonus=mentioned_bonus, action_options_text=action_options_block, moderation_prompt=moderation_prompt_block, - identity_block=identity_block, + name_block=name_block, plan_style=global_config.personality.plan_style, ) return prompt, message_id_list diff --git a/src/chat/replyer/default_generator.py b/src/chat/replyer/default_generator.py index 59340914..c8a78aee 100644 --- a/src/chat/replyer/default_generator.py +++ b/src/chat/replyer/default_generator.py @@ -1015,7 +1015,7 @@ class DefaultReplyer: async def llm_generate_content(self, prompt: str): with Timer("LLM生成", {}): # 内部计时器,可选保留 # 直接使用已初始化的模型实例 - logger.info(f"使用模型集生成回复: {self.express_model.model_for_task}") + logger.info(f"使用模型集生成回复: {', '.join(map(str, self.express_model.model_for_task.model_list))}") if global_config.debug.show_prompt: logger.info(f"\n{prompt}\n")