mirror of https://github.com/Mai-with-u/MaiBot.git
feat: add model-level temperature configuration and improve temperature priority handling
parent 17d58d62ce
commit d97c6aa948
@@ -60,6 +60,9 @@ class ModelInfo(ConfigBase):
     price_out: float = field(default=0.0)
     """Output price per million tokens"""

+    temperature: float | None = field(default=None)
+    """Model-level temperature (optional); overrides the temperature in the task config"""
+
     force_stream_mode: bool = field(default=False)
     """Whether to force streaming output mode"""

@@ -315,12 +315,21 @@ class LLMRequest:
         while retry_remain > 0:
             try:
                 if request_type == RequestType.RESPONSE:
+                    # Temperature priority: explicit argument > model-level config > extra_params > task config
+                    effective_temperature = temperature
+                    if effective_temperature is None:
+                        effective_temperature = model_info.temperature
+                    if effective_temperature is None:
+                        effective_temperature = (model_info.extra_params or {}).get("temperature")
+                    if effective_temperature is None:
+                        effective_temperature = self.model_for_task.temperature
+
                     return await client.get_response(
                         model_info=model_info,
                         message_list=(compressed_messages or message_list),
                         tool_options=tool_options,
                         max_tokens=self.model_for_task.max_tokens if max_tokens is None else max_tokens,
-                        temperature=temperature if temperature is not None else (model_info.extra_params or {}).get("temperature", self.model_for_task.temperature),
+                        temperature=effective_temperature,
                         response_format=response_format,
                         stream_response_handler=stream_response_handler,
                         async_response_parser=async_response_parser,
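The resolution chain added above can be read as a standalone helper. A minimal sketch under the names used in the hunk (model_info.temperature, model_info.extra_params, and the task-level self.model_for_task.temperature); resolve_temperature itself is a hypothetical name, not part of the commit:

def resolve_temperature(
    explicit: float | None,
    model_temperature: float | None,
    extra_params: dict | None,
    task_temperature: float | None,
) -> float | None:
    # Priority: explicit argument > model-level config > extra_params > task config.
    if explicit is not None:
        return explicit
    if model_temperature is not None:
        return model_temperature
    extra = (extra_params or {}).get("temperature")
    if extra is not None:
        return extra
    return task_temperature

# The explicit argument wins over every configured value:
assert resolve_temperature(0.9, 0.3, {"temperature": 0.5}, 0.7) == 0.9
# With nothing else set, the task config is the final fallback:
assert resolve_temperature(None, None, None, 0.7) == 0.7

Compared with the removed one-liner, the staged `if ... is None` chain makes room for the new model-level field between the explicit argument and extra_params, and keeps each fallback level readable on its own line.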