mirror of https://github.com/Mai-with-u/MaiBot.git
remove: unused model
parent 0852af49f9
commit a3c3fcf518
@@ -22,7 +22,6 @@ class QAManager:
     ):
         self.embed_manager = embed_manager
         self.kg_manager = kg_manager
-        self.qa_model = LLMRequest(model_set=model_config.model_task_config.lpmm_qa, request_type="lpmm.qa")
 
     async def process_query(
         self, question: str
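After this hunk, QAManager keeps only its two remaining dependencies and no longer constructs an LLMRequest for the QA task. A rough sketch of the resulting constructor; the parameter list, type hints, and the process_query body are assumptions, only the two retained assignments come from the diff:

    class QAManager:
        def __init__(self, embed_manager, kg_manager):
            # Only these two dependencies remain; the unused lpmm_qa
            # LLMRequest instance is no longer created here.
            self.embed_manager = embed_manager
            self.kg_manager = kg_manager

        async def process_query(self, question: str):
            # Body unchanged by this commit (not shown in the hunk).
            ...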
@@ -132,9 +132,6 @@ class ModelTaskConfig(ConfigBase):
     lpmm_rdf_build: TaskConfig
     """LPMM RDF build model configuration"""
 
-    lpmm_qa: TaskConfig
-    """LPMM QA model configuration"""
-
     def get_task(self, task_name: str) -> TaskConfig:
         """Get the configuration of the specified task"""
         if hasattr(self, task_name):
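With the lpmm_qa field removed from ModelTaskConfig, a lookup by that name now fails the hasattr check shown above. A minimal usage sketch, assuming model_config.model_task_config is how the config object is reached (as in the removed QAManager line); the behaviour of get_task for unknown names is not shown in the diff and is only hinted at here:

    # Sketch only: the diff shows just the hasattr() guard, not the error path.
    task_config = model_config.model_task_config
    rdf_task = task_config.get_task("lpmm_rdf_build")  # still a declared field, resolves
    qa_task = task_config.get_task("lpmm_qa")          # no longer declared, hasattr() is False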
@@ -848,11 +848,7 @@ class ChatHistorySummarizer:
         )
 
         try:
-            response, _ = await self.summarizer_llm.generate_response_async(
-                prompt=prompt,
-                temperature=0.3,
-                max_tokens=500,
-            )
+            response, _ = await self.summarizer_llm.generate_response_async(prompt=prompt)
 
             # Parse the JSON response
             json_str = response.strip()
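The summarizer no longer pins temperature=0.3 / max_tokens=500 per call and passes only the prompt, so sampling parameters presumably come from the task's configuration (the TOML sections below set temperature and max_tokens per task). A minimal sketch of the simplified call path, assuming the response is parsed as JSON as the hunk's comment indicates; the helper name and the except fallback are illustrative, not from the diff:

    import json

    async def summarize_history(summarizer_llm, prompt: str) -> dict:
        # Sketch: summarizer_llm stands in for self.summarizer_llm from the hunk;
        # the empty-dict fallback on failure is an assumption.
        try:
            # Only the prompt is passed now; sampling settings come from config.
            response, _ = await summarizer_llm.generate_response_async(prompt=prompt)
            return json.loads(response.strip())
        except Exception:
            return {}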
@@ -191,10 +191,4 @@ slow_threshold = 20.0
 model_list = ["siliconflow-deepseek-v3.2"]
 temperature = 0.2
 max_tokens = 800
 slow_threshold = 20.0
-
-[model_task_config.lpmm_qa] # QA model
-model_list = ["siliconflow-deepseek-v3.2"]
-temperature = 0.7
-max_tokens = 800
-slow_threshold = 20.0
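A quick way to confirm the section is really gone after editing the TOML; tomllib ships with Python 3.11+, and the config file name here is an assumption, not taken from the diff:

    import tomllib

    # File name is an assumption; substitute the repo's actual model config path.
    with open("model_config.toml", "rb") as f:
        cfg = tomllib.load(f)

    assert "lpmm_qa" not in cfg.get("model_task_config", {})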