import json
from abc import ABC, abstractmethod
from collections.abc import Generator, Mapping, Sequence
from typing import Any, Optional

from core.agent.base_agent_runner import BaseAgentRunner
from core.agent.entities import AgentScratchpadUnit
from core.agent.output_parser.cot_output_parser import CotAgentOutputParser
from core.app.apps.base_app_queue_manager import PublishFrom
from core.app.entities.queue_entities import QueueAgentThoughtEvent, QueueMessageEndEvent, QueueMessageFileEvent
from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta, LLMUsage
from core.model_runtime.entities.message_entities import (
    AssistantPromptMessage,
    PromptMessage,
    PromptMessageTool,
    ToolPromptMessage,
    UserPromptMessage,
)
from core.ops.ops_trace_manager import TraceQueueManager
from core.prompt.agent_history_prompt_transform import AgentHistoryPromptTransform
from core.tools.__base.tool import Tool
from core.tools.entities.tool_entities import ToolInvokeMeta
from core.tools.tool_engine import ToolEngine
from models.model import Message


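# CotAgentRunner drives a ReAct-style (chain-of-thought) agent loop: the model streams
# Thought/Action steps, tool actions are executed between iterations, and the loop ends
# when a "Final Answer" action (or no action at all) is produced or the iteration limit
# is reached. Subclasses supply the concrete prompt layout via _organize_prompt_messages.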
class CotAgentRunner(BaseAgentRunner, ABC):
    _is_first_iteration = True
    _ignore_observation_providers = ["wenxin"]
    _historic_prompt_messages: list[PromptMessage]
    _agent_scratchpad: list[AgentScratchpadUnit]
    _instruction: str
    _query: str
    _prompt_messages_tools: Sequence[PromptMessageTool]

    def run(
        self,
        message: Message,
        query: str,
        inputs: Mapping[str, str],
    ) -> Generator:
        """
        Run the CoT agent application
        """

        app_generate_entity = self.application_generate_entity
        self._repack_app_generate_entity(app_generate_entity)
        self._init_react_state(query)

        trace_manager = app_generate_entity.trace_manager

        # check model mode
        if "Observation" not in app_generate_entity.model_conf.stop:
            if app_generate_entity.model_conf.provider not in self._ignore_observation_providers:
                app_generate_entity.model_conf.stop.append("Observation")

        app_config = self.app_config
        assert app_config.agent

        # init instruction
        inputs = inputs or {}
        instruction = app_config.prompt_template.simple_prompt_template or ""
        self._instruction = self._fill_in_inputs_from_external_data_tools(instruction, inputs)

        iteration_step = 1
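        # max_iteration is capped at 5; the extra "+ 1" step is a final pass in which all
        # tools are removed from the prompt so the model is pushed to answer directly.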
        max_iteration_steps = min(app_config.agent.max_iteration if app_config.agent else 5, 5) + 1

        # convert tools into ModelRuntime Tool format
        tool_instances, prompt_messages_tools = self._init_prompt_tools()
        self._prompt_messages_tools = prompt_messages_tools

        function_call_state = True
        llm_usage: dict[str, Optional[LLMUsage]] = {"usage": None}
        final_answer = ""

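        # Accumulate token counts and prices from each round into the shared usage dict,
        # which is mutated in place.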
        def increase_usage(final_llm_usage_dict: dict[str, Optional[LLMUsage]], usage: LLMUsage):
            if not final_llm_usage_dict["usage"]:
                final_llm_usage_dict["usage"] = usage
            else:
                llm_usage = final_llm_usage_dict["usage"]
                llm_usage.prompt_tokens += usage.prompt_tokens
                llm_usage.completion_tokens += usage.completion_tokens
                llm_usage.prompt_price += usage.prompt_price
                llm_usage.completion_price += usage.completion_price
                llm_usage.total_price += usage.total_price

        model_instance = self.model_instance

        while function_call_state and iteration_step <= max_iteration_steps:
            # keep looping until the model makes no further tool call
            function_call_state = False

            if iteration_step == max_iteration_steps:
                # the last iteration, remove all tools
                self._prompt_messages_tools = []

            message_file_ids: list[str] = []

            agent_thought = self.create_agent_thought(
                message_id=message.id, message="", tool_name="", tool_input="", messages_ids=message_file_ids
            )

            if iteration_step > 1:
                self.queue_manager.publish(
                    QueueAgentThoughtEvent(agent_thought_id=agent_thought.id), PublishFrom.APPLICATION_MANAGER
                )

            # recalc llm max tokens
            prompt_messages = self._organize_prompt_messages()
            self.recalc_llm_max_tokens(self.model_config, prompt_messages)
            # invoke model
            chunks = model_instance.invoke_llm(
                prompt_messages=prompt_messages,
                model_parameters=app_generate_entity.model_conf.parameters,
                tools=[],
                stop=app_generate_entity.model_conf.stop,
                stream=True,
                user=self.user_id,
                callbacks=[],
            )

            usage_dict: dict[str, Optional[LLMUsage]] = {}
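            # The output parser splits the raw LLM stream into plain-text chunks and parsed
            # Action units, and records the reported usage in usage_dict.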
            react_chunks = CotAgentOutputParser.handle_react_stream_output(chunks, usage_dict)
            scratchpad = AgentScratchpadUnit(
                agent_response="",
                thought="",
                action_str="",
                observation="",
                action=None,
            )

            # publish agent thought if it's first iteration
            if iteration_step == 1:
                self.queue_manager.publish(
                    QueueAgentThoughtEvent(agent_thought_id=agent_thought.id), PublishFrom.APPLICATION_MANAGER
                )

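            # Consume the parsed stream: plain text is appended to the thought and forwarded
            # to the caller as chunks, while a parsed Action is recorded on the scratchpad.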
            for chunk in react_chunks:
                if isinstance(chunk, AgentScratchpadUnit.Action):
                    action = chunk
                    # detect action
                    assert scratchpad.agent_response is not None
                    scratchpad.agent_response += json.dumps(chunk.model_dump())
                    scratchpad.action_str = json.dumps(chunk.model_dump())
                    scratchpad.action = action
                else:
                    assert scratchpad.agent_response is not None
                    scratchpad.agent_response += chunk
                    assert scratchpad.thought is not None
                    scratchpad.thought += chunk
                    yield LLMResultChunk(
                        model=self.model_config.model,
                        prompt_messages=prompt_messages,
                        system_fingerprint="",
                        delta=LLMResultChunkDelta(index=0, message=AssistantPromptMessage(content=chunk), usage=None),
                    )

            assert scratchpad.thought is not None
            scratchpad.thought = scratchpad.thought.strip() or "I am thinking about how to help you"
            self._agent_scratchpad.append(scratchpad)

            # get llm usage
            if "usage" in usage_dict:
                if usage_dict["usage"] is not None:
                    increase_usage(llm_usage, usage_dict["usage"])
            else:
                usage_dict["usage"] = LLMUsage.empty_usage()

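            # Persist this round's thought, detected action and LLM usage on the agent thought
            # record before deciding whether to invoke a tool.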
            self.save_agent_thought(
                agent_thought=agent_thought,
                tool_name=(scratchpad.action.action_name if scratchpad.action and not scratchpad.is_final() else ""),
                tool_input={scratchpad.action.action_name: scratchpad.action.action_input} if scratchpad.action else {},
                tool_invoke_meta={},
                thought=scratchpad.thought or "",
                observation="",
                answer=scratchpad.agent_response or "",
                messages_ids=[],
                llm_usage=usage_dict["usage"],
            )

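            # Notify listeners of the updated thought unless this round already produced the
            # final answer.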
            if not scratchpad.is_final():
                self.queue_manager.publish(
                    QueueAgentThoughtEvent(agent_thought_id=agent_thought.id), PublishFrom.APPLICATION_MANAGER
                )

            if not scratchpad.action:
                # failed to extract action, return final answer directly
                final_answer = ""
            else:
                if scratchpad.action.action_name.lower() == "final answer":
                    # action is final answer, return final answer directly
                    try:
                        if isinstance(scratchpad.action.action_input, dict):
                            final_answer = json.dumps(scratchpad.action.action_input, ensure_ascii=False)
                        elif isinstance(scratchpad.action.action_input, str):
                            final_answer = scratchpad.action.action_input
                        else:
                            final_answer = f"{scratchpad.action.action_input}"
                    except json.JSONDecodeError:
                        final_answer = f"{scratchpad.action.action_input}"
                else:
                    function_call_state = True
                    # action is tool call, invoke tool
                    tool_invoke_response, tool_invoke_meta = self._handle_invoke_action(
                        action=scratchpad.action,
                        tool_instances=tool_instances,
                        message_file_ids=message_file_ids,
                        trace_manager=trace_manager,
                    )
                    scratchpad.observation = tool_invoke_response
                    scratchpad.agent_response = tool_invoke_response

                    self.save_agent_thought(
                        agent_thought=agent_thought,
                        tool_name=scratchpad.action.action_name,
                        tool_input={scratchpad.action.action_name: scratchpad.action.action_input},
                        thought=scratchpad.thought or "",
                        observation={scratchpad.action.action_name: tool_invoke_response},
                        tool_invoke_meta={scratchpad.action.action_name: tool_invoke_meta.to_dict()},
                        answer=scratchpad.agent_response,
                        messages_ids=message_file_ids,
                        llm_usage=usage_dict["usage"],
                    )

                    self.queue_manager.publish(
                        QueueAgentThoughtEvent(agent_thought_id=agent_thought.id), PublishFrom.APPLICATION_MANAGER
                    )

                # update prompt tool message
                for prompt_tool in self._prompt_messages_tools:
                    self.update_prompt_message_tool(tool_instances[prompt_tool.name], prompt_tool)

            iteration_step += 1

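        # After the loop, emit the final answer as a closing chunk, persist it as the last
        # agent thought, and publish the message end event.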
        yield LLMResultChunk(
            model=model_instance.model,
            prompt_messages=prompt_messages,
            delta=LLMResultChunkDelta(
                index=0, message=AssistantPromptMessage(content=final_answer), usage=llm_usage["usage"]
            ),
            system_fingerprint="",
        )

        # save agent thought
        self.save_agent_thought(
            agent_thought=agent_thought,
            tool_name="",
            tool_input={},
            tool_invoke_meta={},
            thought=final_answer,
            observation={},
            answer=final_answer,
            messages_ids=[],
        )
        # publish end event
        self.queue_manager.publish(
            QueueMessageEndEvent(
                llm_result=LLMResult(
                    model=model_instance.model,
                    prompt_messages=prompt_messages,
                    message=AssistantPromptMessage(content=final_answer),
                    usage=llm_usage["usage"] or LLMUsage.empty_usage(),
                    system_fingerprint="",
                )
            ),
            PublishFrom.APPLICATION_MANAGER,
        )

    def _handle_invoke_action(
        self,
        action: AgentScratchpadUnit.Action,
        tool_instances: Mapping[str, Tool],
        message_file_ids: list[str],
        trace_manager: Optional[TraceQueueManager] = None,
    ) -> tuple[str, ToolInvokeMeta]:
        """
        handle invoke action
        :param action: action
        :param tool_instances: tool instances
        :param message_file_ids: message file ids
        :param trace_manager: trace manager
        :return: observation, meta
        """
        # action is tool call, invoke tool
        tool_call_name = action.action_name
        tool_call_args = action.action_input
        tool_instance = tool_instances.get(tool_call_name)

        if not tool_instance:
            answer = f"there is not a tool named {tool_call_name}"
            return answer, ToolInvokeMeta.error_instance(answer)

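        # Action inputs may arrive as a JSON string; decode them into a dict on a best-effort
        # basis before invoking the tool.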
        if isinstance(tool_call_args, str):
            try:
                tool_call_args = json.loads(tool_call_args)
            except json.JSONDecodeError:
                pass

        # invoke tool
        tool_invoke_response, message_files, tool_invoke_meta = ToolEngine.agent_invoke(
            tool=tool_instance,
            tool_parameters=tool_call_args,
            user_id=self.user_id,
            tenant_id=self.tenant_id,
            message=self.message,
            invoke_from=self.application_generate_entity.invoke_from,
            agent_tool_callback=self.agent_callback,
            trace_manager=trace_manager,
        )

        # publish files
        for message_file_id in message_files:
            # publish message file
            self.queue_manager.publish(
                QueueMessageFileEvent(message_file_id=message_file_id), PublishFrom.APPLICATION_MANAGER
            )
            # add message file ids
            message_file_ids.append(message_file_id)

        return tool_invoke_response, tool_invoke_meta

    def _convert_dict_to_action(self, action: dict) -> AgentScratchpadUnit.Action:
        """
        convert dict to action
        """
        return AgentScratchpadUnit.Action(action_name=action["action"], action_input=action["action_input"])

    def _fill_in_inputs_from_external_data_tools(self, instruction: str, inputs: Mapping[str, Any]) -> str:
        """
        fill in inputs from external data tools
        """
        for key, value in inputs.items():
            try:
                instruction = instruction.replace(f"{{{{{key}}}}}", str(value))
            except Exception:
                continue

        return instruction

    def _init_react_state(self, query) -> None:
        """
        init agent scratchpad
        """
        self._query = query
        self._agent_scratchpad = []
        self._historic_prompt_messages = self._organize_historic_prompt_messages()

    @abstractmethod
    def _organize_prompt_messages(self) -> list[PromptMessage]:
        """
        organize prompt messages
        """

    def _format_assistant_message(self, agent_scratchpad: list[AgentScratchpadUnit]) -> str:
        """
        format assistant message
        """
        message = ""
        for scratchpad in agent_scratchpad:
            if scratchpad.is_final():
                message += f"Final Answer: {scratchpad.agent_response}"
            else:
                message += f"Thought: {scratchpad.thought}\n\n"
                if scratchpad.action_str:
                    message += f"Action: {scratchpad.action_str}\n\n"
                if scratchpad.observation:
                    message += f"Observation: {scratchpad.observation}\n\n"

        return message

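    # Rebuild ReAct scratchpad units from the stored assistant/tool messages so that earlier
    # turns are folded back into single formatted assistant messages.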
    def _organize_historic_prompt_messages(
        self, current_session_messages: list[PromptMessage] | None = None
    ) -> list[PromptMessage]:
        """
        organize historic prompt messages
        """
        result: list[PromptMessage] = []
        scratchpads: list[AgentScratchpadUnit] = []
        current_scratchpad: AgentScratchpadUnit | None = None

        for message in self.history_prompt_messages:
            if isinstance(message, AssistantPromptMessage):
                if not current_scratchpad:
                    assert isinstance(message.content, str)
                    current_scratchpad = AgentScratchpadUnit(
                        agent_response=message.content,
                        thought=message.content or "I am thinking about how to help you",
                        action_str="",
                        action=None,
                        observation=None,
                    )
                    scratchpads.append(current_scratchpad)
                if message.tool_calls:
                    try:
                        current_scratchpad.action = AgentScratchpadUnit.Action(
                            action_name=message.tool_calls[0].function.name,
                            action_input=json.loads(message.tool_calls[0].function.arguments),
                        )
                        current_scratchpad.action_str = json.dumps(current_scratchpad.action.to_dict())
                    except Exception:
                        pass
            elif isinstance(message, ToolPromptMessage):
                if current_scratchpad:
                    assert isinstance(message.content, str)
                    current_scratchpad.observation = message.content
                else:
                    raise NotImplementedError("expected str type")
            elif isinstance(message, UserPromptMessage):
                if scratchpads:
                    result.append(AssistantPromptMessage(content=self._format_assistant_message(scratchpads)))
                    scratchpads = []
                    current_scratchpad = None

                result.append(message)

        if scratchpads:
            result.append(AssistantPromptMessage(content=self._format_assistant_message(scratchpads)))

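        # Run the rebuilt history through AgentHistoryPromptTransform, which trims it to the
        # context window available for history.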
        historic_prompts = AgentHistoryPromptTransform(
            model_config=self.model_config,
            prompt_messages=current_session_messages or [],
            history_messages=result,
            memory=self.memory,
        ).get_prompt()
        return historic_prompts