From d807ddc8978debaa2647e7f25ed96653046c9100 Mon Sep 17 00:00:00 2001
From: KawaiiYusora
Date: Mon, 3 Mar 2025 20:30:17 +0800
Subject: [PATCH] Update config
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 config/bot_config.toml     | 52 +++++++++++++++++++++++---------------
 src/plugins/chat/config.py |  1 +
 2 files changed, 33 insertions(+), 20 deletions(-)

diff --git a/config/bot_config.toml b/config/bot_config.toml
index 83526945..6d4a143c 100644
--- a/config/bot_config.toml
+++ b/config/bot_config.toml
@@ -3,59 +3,71 @@ qq = 123
 nickname = "麦麦"
 
 [message]
-min_text_length = 2
-max_context_size = 15
-emoji_chance = 0.2
+min_text_length = 2 # 麦麦 only replies to messages whose text length is at least this value
+max_context_size = 15 # number of context messages 麦麦 keeps; older ones are discarded automatically
+emoji_chance = 0.2 # probability that 麦麦 replies with an emoji/sticker
 
 [emoji]
-check_interval = 120
-register_interval = 10
+check_interval = 120 # interval between emoji checks
+register_interval = 10 # interval between emoji registrations
 
 [cq_code]
 enable_pic_translate = false
 
 [response]
-api_using = "siliconflow"
-api_paid = true
-model_r1_probability = 0.8
-model_v3_probability = 0.1
-model_r1_distill_probability = 0.1
+api_using = "siliconflow" # which API to use: "siliconflow" or "deepseek"
+api_paid = true # whether the siliconflow account is paid; controls access to some of siliconflow's paid models
+model_r1_probability = 0.8 # probability of picking the R1 model when 麦麦 replies
+model_v3_probability = 0.1 # probability of picking the V3 model when 麦麦 replies
+model_r1_distill_probability = 0.1 # probability of picking the R1 distill model when 麦麦 replies
 
 [memory]
-build_memory_interval = 300
+build_memory_interval = 300 # memory building interval, in seconds
 
 [others]
-enable_advance_output = true
+enable_advance_output = true # whether to enable advanced output
+enable_kuuki_read = true # whether to enable the kuuki-reading ("reading the room") feature
 
 [groups]
 talk_allowed = [
     123,
     123,
-]
-talk_frequency_down = []
-ban_user_id = []
+] # groups 麦麦 is allowed to reply in
+talk_frequency_down = [] # groups with a lowered reply frequency
+ban_user_id = [] # QQ IDs that never get a reply
 
-[model.llm_reasoning]
+
+#V3
+#name = "deepseek-chat"
+#base_url = "DEEP_SEEK_BASE_URL"
+#key = "DEEP_SEEK_KEY"
+
+#R1
+#name = "deepseek-reasoner"
+#base_url = "DEEP_SEEK_BASE_URL"
+#key = "DEEP_SEEK_KEY"
+
+[model.llm_reasoning] #R1
 name = "Pro/deepseek-ai/DeepSeek-R1"
 base_url = "SILICONFLOW_BASE_URL"
 key = "SILICONFLOW_KEY"
 
-[model.llm_reasoning_minor]
+[model.llm_reasoning_minor] #R1 distill
 name = "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B"
 base_url = "SILICONFLOW_BASE_URL"
 key = "SILICONFLOW_KEY"
 
-[model.llm_normal]
+[model.llm_normal] #V3
 name = "Pro/deepseek-ai/DeepSeek-V3"
 base_url = "SILICONFLOW_BASE_URL"
 key = "SILICONFLOW_KEY"
 
-[model.llm_normal_minor]
+[model.llm_normal_minor] #V2.5
 name = "deepseek-ai/DeepSeek-V2.5"
 base_url = "SILICONFLOW_BASE_URL"
 key = "SILICONFLOW_KEY"
 
-[model.vlm]
+[model.vlm] #image recognition
 name = "deepseek-ai/deepseek-vl2"
 base_url = "SILICONFLOW_BASE_URL"
 key = "SILICONFLOW_KEY"
diff --git a/src/plugins/chat/config.py b/src/plugins/chat/config.py
index 55ceb07b..caef55be 100644
--- a/src/plugins/chat/config.py
+++ b/src/plugins/chat/config.py
@@ -131,6 +131,7 @@ class BotConfig:
         if "others" in toml_dict:
             others_config = toml_dict["others"]
             config.enable_advance_output = others_config.get("enable_advance_output", config.enable_advance_output)
+            config.enable_kuuki_read = others_config.get("enable_kuuki_read", config.enable_kuuki_read)
 
         logger.success(f"成功加载配置文件: {config_path}")
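
Note on the config.py hunk: the new others_config.get("enable_kuuki_read", config.enable_kuuki_read) call falls back to an attribute that must already be declared on BotConfig with a default, and that declaration is outside this diff. A minimal sketch of how the pieces are assumed to fit together -- the field name comes from the patch, the True default mirrors bot_config.toml, and the method name and structure here are illustrative rather than the project's actual code:

    from dataclasses import dataclass

    @dataclass
    class BotConfig:
        # switch already read by the loader before this patch
        enable_advance_output: bool = True
        # new switch added by this patch; default mirrors bot_config.toml
        enable_kuuki_read: bool = True

        @classmethod
        def from_toml_dict(cls, toml_dict: dict) -> "BotConfig":
            config = cls()
            if "others" in toml_dict:
                others_config = toml_dict["others"]
                config.enable_advance_output = others_config.get(
                    "enable_advance_output", config.enable_advance_output
                )
                # same fallback pattern as the patch: keep the class default
                # when the key is missing from the [others] table
                config.enable_kuuki_read = others_config.get(
                    "enable_kuuki_read", config.enable_kuuki_read
                )
            return config

If the class field were missing, evaluating the default argument of .get() would raise AttributeError, so the TOML entry and the class default need to land together.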
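
On the [response] comments: the three model_*_probability values in the shipped config add up to 1.0 (0.8 + 0.1 + 0.1), one weight per [model.*] table defined further down. The selection logic itself is not part of this patch; the sketch below only illustrates the kind of weighted draw the comments describe, with illustrative names throughout:

    import random

    # weights as configured under [response]; keys name the [model.*] tables they map to
    MODEL_WEIGHTS = {
        "llm_reasoning": 0.8,        # model_r1_probability (R1)
        "llm_normal": 0.1,           # model_v3_probability (V3)
        "llm_reasoning_minor": 0.1,  # model_r1_distill_probability (R1 distill)
    }

    def pick_model() -> str:
        """Return one model key, drawn with probability proportional to its weight."""
        names = list(MODEL_WEIGHTS)
        weights = list(MODEL_WEIGHTS.values())
        return random.choices(names, weights=weights, k=1)[0]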