feat: merge model downgrade feature from main

pull/1202/head
Eric-Terminal 2025-08-21 02:43:18 +08:00
parent a68c68cbe9
commit 734883e1f9
1 changed file with 26 additions and 5 deletions

View File

@@ -319,7 +319,8 @@ class LLMRequest:
                 wait_interval, compressed_messages = self._default_exception_handler(
                     e,
                     self.task_name,
-                    model_name=model_info.name,
+                    model_info=model_info,
+                    api_provider=api_provider,
                     remain_try=retry_remain,
                     retry_interval=api_provider.retry_interval,
                     messages=(message_list, compressed_messages is not None) if message_list else None,
@@ -342,7 +343,8 @@ class LLMRequest:
         self,
         e: Exception,
         task_name: str,
-        model_name: str,
+        model_info: ModelInfo,
+        api_provider: APIProvider,
         remain_try: int,
         retry_interval: int = 10,
         messages: Tuple[List[Message], bool] | None = None,
@@ -359,7 +361,7 @@ class LLMRequest:
         Returns:
             (等待间隔如果为0则不等待-1则不再请求该模型, 新的消息列表适用于压缩消息)
         """
+        model_name = model_info.name
         if isinstance(e, NetworkConnectionError):  # 网络连接错误
             return self._check_retry(
                 remain_try,
@@ -374,7 +376,8 @@ class LLMRequest:
             return self._handle_resp_not_ok(
                 e,
                 task_name,
-                model_name,
+                model_info,
+                api_provider,
                 remain_try,
                 retry_interval,
                 messages,
@@ -425,7 +428,8 @@ class LLMRequest:
         self,
         e: RespNotOkException,
         task_name: str,
-        model_name: str,
+        model_info: ModelInfo,
+        api_provider: APIProvider,
         remain_try: int,
         retry_interval: int = 10,
         messages: tuple[list[Message], bool] | None = None,
@@ -444,6 +448,23 @@ class LLMRequest:
         """
         # 响应错误
         if e.status_code in [400, 401, 402, 403, 404]:
+            model_name = model_info.name
+            if (
+                e.status_code == 403
+                and model_name.startswith("Pro/deepseek-ai")
+                and api_provider.base_url == "https://api.siliconflow.cn/v1/"
+            ):
+                old_model_name = model_name
+                new_model_name = model_name[4:]
+                model_info.name = new_model_name
+                logger.warning(f"检测到403错误模型从 {old_model_name} 降级为 {new_model_name}")
+                # 更新任务配置中的模型列表
+                for i, m_name in enumerate(self.model_for_task.model_list):
+                    if m_name == old_model_name:
+                        self.model_for_task.model_list[i] = new_model_name
+                        logger.warning(f"将任务 {self.task_name} 的模型列表中的 {old_model_name} 临时降级至 {new_model_name}")
+                        break
+                return 0, None  # 立即重试
             # 客户端错误
             logger.warning(
                 f"任务-'{task_name}' 模型-'{model_name}': 请求失败,错误代码-{e.status_code},错误信息-{e.message}"