mirror of https://github.com/Mai-with-u/MaiBot.git
fix of types
parent 246961aadf
commit 91e716a24c
@@ -305,8 +305,9 @@ class BrainPlanner:
         else:
             actions_before_now_block = ""

-        # 构建聊天上下文描述
-        chat_context_description = f"你正在和 {chat_target_info.person_name or chat_target_info.user_nickname or '对方'} 聊天中"
+        if chat_target_info:
+            # 构建聊天上下文描述
+            chat_context_description = f"你正在和 {chat_target_info.person_name or chat_target_info.user_nickname or '对方'} 聊天中"

         # 构建动作选项块
         action_options_block = await self._build_action_options_block(current_available_actions)
@@ -192,21 +192,18 @@ class HeartFChatting:
                 return True

            self.last_read_time = time.time()


            # !此处使at或者提及必定回复
-            metioned_message = None
+            mentioned_message = None
            for message in recent_messages_list:
                if (message.is_mentioned or message.is_at) and global_config.chat.mentioned_bot_reply:
-                    metioned_message = message
-            # *控制频率用
-            if not metioned_message:
-                if random.random() > global_config.chat.talk_value:
-                    return True
-
-            await self._observe(
-                recent_messages_list=recent_messages_list,force_reply_message=metioned_message
-            )
+                    mentioned_message = message
+
+            if mentioned_message:
+                await self._observe(recent_messages_list=recent_messages_list, force_reply_message=mentioned_message)
+            elif random.random() > global_config.chat.talk_value:
+                return True
        else:
            # Normal模式:消息数量不足,等待
            await asyncio.sleep(0.2)
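The restructured block above reads as: a message that mentions or @s the bot always forces a reply; otherwise a random draw against talk_value decides whether this cycle is skipped. A minimal standalone sketch of that gating logic, using a made-up Msg dataclass and plain parameters instead of global_config, purely to illustrate the control flow:

import random
from dataclasses import dataclass
from typing import List, Optional


@dataclass
class Msg:
    text: str
    is_mentioned: bool = False
    is_at: bool = False


def pick_forced_reply(messages: List[Msg], mentioned_bot_reply: bool) -> Optional[Msg]:
    # Keep the last message that mentions/@s the bot, if that option is enabled
    forced = None
    for m in messages:
        if (m.is_mentioned or m.is_at) and mentioned_bot_reply:
            forced = m
    return forced


def should_skip_cycle(messages: List[Msg], mentioned_bot_reply: bool, talk_value: float) -> bool:
    # A mention always triggers a reply; otherwise talk_value acts as the reply probability
    if pick_forced_reply(messages, mentioned_bot_reply):
        return False
    return random.random() > talk_value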
@@ -265,7 +262,8 @@ class HeartFChatting:

    async def _observe(
        self, # interest_value: float = 0.0,
-        recent_messages_list: Optional[List["DatabaseMessages"]] = None, force_reply_message:"DatabaseMessages" = None
+        recent_messages_list: Optional[List["DatabaseMessages"]] = None,
+        force_reply_message: Optional["DatabaseMessages"] = None,
    ) -> bool:  # sourcery skip: merge-else-if-into-elif, remove-redundant-if
        if recent_messages_list is None:
            recent_messages_list = []
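The signature change matches the commit's typing theme: a parameter annotated as a bare "DatabaseMessages" cannot default to None under a strict checker (implicit Optional is rejected), so the None default has to be declared in the type. A small sketch with a stand-in class, not the project's real model:

from typing import List, Optional


class DatabaseMessages:  # stand-in for the real message model
    pass


# A strict checker flags this: None is not a valid DatabaseMessages
#   def observe(force_reply_message: DatabaseMessages = None) -> bool: ...

# Declaring the None default as part of the type is accepted
def observe(
    recent_messages_list: Optional[List[DatabaseMessages]] = None,
    force_reply_message: Optional[DatabaseMessages] = None,
) -> bool:
    return force_reply_message is not None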
@@ -542,7 +540,6 @@ class HeartFChatting:
        """执行单个动作的通用函数"""
        try:
            with Timer(f"动作{action_planner_info.action_type}", cycle_timers):
-
                if action_planner_info.action_type == "no_reply":
                    # 直接处理no_action逻辑,不再通过动作系统
                    reason = action_planner_info.reasoning or "选择不回复"
@@ -586,7 +583,9 @@ class HeartFChatting:

                if not success or not llm_response or not llm_response.reply_set:
                    if action_planner_info.action_message:
-                        logger.info(f"对 {action_planner_info.action_message.processed_plain_text} 的回复生成失败")
+                        logger.info(
+                            f"对 {action_planner_info.action_message.processed_plain_text} 的回复生成失败"
+                        )
                    else:
                        logger.info("回复生成失败")
                    return {"action_type": "reply", "success": False, "reply_text": "", "loop_info": None}
@@ -23,17 +23,16 @@ class Heartflow:
            if chat := self.heartflow_chat_list.get(chat_id):
                return chat
            else:
-                chat_stream: ChatStream = get_chat_manager().get_stream(chat_id)
+                chat_stream: ChatStream | None = get_chat_manager().get_stream(chat_id)
+                if not chat_stream:
+                    raise ValueError(f"未找到 chat_id={chat_id} 的聊天流")
                if chat_stream.group_info:
                    new_chat = HeartFChatting(chat_id=chat_id)
-                    await new_chat.start()
-                    self.heartflow_chat_list[chat_id] = new_chat
-                    return new_chat
                else:
                    new_chat = BrainChatting(chat_id=chat_id)
-                    await new_chat.start()
-                    self.heartflow_chat_list[chat_id] = new_chat
-                    return new_chat
+                await new_chat.start()
+                self.heartflow_chat_list[chat_id] = new_chat
+                return new_chat
        except Exception as e:
            logger.error(f"创建心流聊天 {chat_id} 失败: {e}", exc_info=True)
            traceback.print_exc()
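Annotating the lookup as ChatStream | None and raising immediately when it is missing also works as type narrowing: after the raise, a checker treats chat_stream as definitely not None, so the later chat_stream.group_info access is valid. A hedged sketch of the pattern with stand-in classes, not the project's real ChatStream or manager:

from typing import Optional


class ChatStream:  # stand-in, only to show the narrowing pattern
    group_info: Optional[dict] = None


def get_stream(chat_id: str) -> Optional[ChatStream]:
    # The real manager may return None for an unknown chat_id
    return None


def classify_chat(chat_id: str) -> str:
    stream = get_stream(chat_id)
    if not stream:
        # Raising here narrows `stream` to ChatStream for the rest of the function
        raise ValueError(f"no chat stream found for chat_id={chat_id}")
    return "group" if stream.group_info else "private"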
@@ -87,7 +87,7 @@ class Message(MessageBase):
                    processed_text = await self._process_message_segments(message.message_segment)
                    if processed_text:
                        segments_text.append(f"{global_config.bot.nickname}: {processed_text}")
-            return "[合并消息]: " + "\n".join(segments_text)
+            return "[合并消息]: " + "\n-- ".join(segments_text)
        else:
            # 处理单个消息段
            return await self._process_single_segment(segment)  # type: ignore
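The changed join only affects how merged/forwarded messages are rendered as plain text: every inner message after the first now gets a "-- " prefix on its line. A tiny example with made-up segment text (the "Bot:" prefix is illustrative, not the configured nickname):

segments_text = ["Bot: hello", "Bot: are you still there?"]
print("[合并消息]: " + "\n-- ".join(segments_text))
# [合并消息]: Bot: hello
# -- Bot: are you still there?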
@@ -222,6 +222,7 @@ class ImageManager:
                img_obj.save()
            except Images.DoesNotExist:  # type: ignore
                Images.create(
                    image_id=str(uuid.uuid4()),
                    emoji_hash=image_hash,
                    path=file_path,
+                    type="emoji",
@@ -75,7 +75,7 @@ class ChatMood:
        time_multiplier = 4 * (1 - math.exp(-0.01 * during_last_time))

        # 基于消息长度计算基础兴趣度
-        message_length = len(message.message_content.content or "")
+        message_length = len(message.processed_plain_text or "")
        interest_multiplier = min(2.0, 1.0 + message_length / 100)

        logger.debug(
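For intuition about the two factors in this hunk: the time multiplier climbs toward an asymptote of 4 as the gap since the last message grows, and the length multiplier rises linearly with message length but is capped at 2. A standalone check of the arithmetic with made-up inputs:

import math


def time_multiplier(seconds_since_last: float) -> float:
    # 4 * (1 - e^(-0.01 * t)): near 0 for back-to-back messages, approaches 4 for long gaps
    return 4 * (1 - math.exp(-0.01 * seconds_since_last))


def interest_multiplier(text: str) -> float:
    # 1.0 + len/100, capped at 2.0, so a message of 100+ characters doubles the interest
    return min(2.0, 1.0 + len(text) / 100)


print(round(time_multiplier(10), 2))    # ~0.38
print(round(time_multiplier(300), 2))   # ~3.80
print(round(interest_multiplier("hi"), 2))        # 1.02
print(round(interest_multiplier("x" * 250), 2))   # 2.0 (capped)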
@@ -14,6 +14,7 @@ from rich.traceback import install
 from src.common.logger import get_logger
 from src.common.data_models.message_data_model import ReplySetModel
 from src.chat.replyer.default_generator import DefaultReplyer
+from src.chat.replyer.private_generator import PrivateReplyer
 from src.chat.message_receive.chat_stream import ChatStream
 from src.chat.utils.utils import process_llm_response
 from src.chat.replyer.replyer_manager import replyer_manager
@@ -38,7 +39,7 @@ def get_replyer(
    chat_stream: Optional[ChatStream] = None,
    chat_id: Optional[str] = None,
    request_type: str = "replyer",
-) -> Optional[DefaultReplyer]:
+) -> Optional[DefaultReplyer | PrivateReplyer]:
    """获取回复器对象

    优先使用chat_stream,如果没有则使用chat_id直接查找。
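Because get_replyer can now return either a DefaultReplyer or a PrivateReplyer (or None), callers that depend on class-specific behaviour need to narrow the union first. A hedged sketch of such a caller, using stand-in classes rather than the real replyers:

from typing import Optional, Union


class DefaultReplyer:  # stand-ins for the real replyer classes
    pass


class PrivateReplyer:
    pass


def handle(replyer: Optional[Union[DefaultReplyer, PrivateReplyer]]) -> None:
    if replyer is None:
        return  # no replyer could be resolved for this chat
    if isinstance(replyer, PrivateReplyer):
        pass  # private-chat specific handling
    else:
        pass  # group/default handling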