diff --git a/src/chat/focus_chat/info_processors/relationship_processor.py b/src/chat/focus_chat/info_processors/relationship_processor.py
index d3654502..656f01a0 100644
--- a/src/chat/focus_chat/info_processors/relationship_processor.py
+++ b/src/chat/focus_chat/info_processors/relationship_processor.py
@@ -94,7 +94,6 @@ class RelationshipProcessor(BaseProcessor):
 
         self.llm_model = LLMRequest(
             model=global_config.model.relation,
-            max_tokens=800,
             request_type="focus.relationship",
         )
 
diff --git a/src/chat/focus_chat/info_processors/self_processor.py b/src/chat/focus_chat/info_processors/self_processor.py
index 450afdba..36dc3c95 100644
--- a/src/chat/focus_chat/info_processors/self_processor.py
+++ b/src/chat/focus_chat/info_processors/self_processor.py
@@ -56,7 +56,6 @@ class SelfProcessor(BaseProcessor):
 
         self.llm_model = LLMRequest(
             model=global_config.model.relation,
-            max_tokens=800,
             request_type="focus.processor.self_identify",
         )
 
diff --git a/src/chat/focus_chat/info_processors/tool_processor.py b/src/chat/focus_chat/info_processors/tool_processor.py
index 5edad5ff..cf31f441 100644
--- a/src/chat/focus_chat/info_processors/tool_processor.py
+++ b/src/chat/focus_chat/info_processors/tool_processor.py
@@ -43,7 +43,6 @@ class ToolProcessor(BaseProcessor):
         self.log_prefix = f"[{subheartflow_id}:ToolExecutor] "
         self.llm_model = LLMRequest(
             model=global_config.model.focus_tool_use,
-            max_tokens=500,
             request_type="focus.processor.tool",
         )
         self.structured_info = []
diff --git a/src/chat/focus_chat/info_processors/working_memory_processor.py b/src/chat/focus_chat/info_processors/working_memory_processor.py
index d40b3c93..9eb84808 100644
--- a/src/chat/focus_chat/info_processors/working_memory_processor.py
+++ b/src/chat/focus_chat/info_processors/working_memory_processor.py
@@ -61,7 +61,6 @@ class WorkingMemoryProcessor(BaseProcessor):
 
         self.llm_model = LLMRequest(
             model=global_config.model.planner,
-            max_tokens=800,
             request_type="focus.processor.working_memory",
         )