diff --git a/python/packages/autogen-agentchat/src/autogen_agentchat/teams/_group_chat/_magentic_one/_magentic_one_orchestrator.py b/python/packages/autogen-agentchat/src/autogen_agentchat/teams/_group_chat/_magentic_one/_magentic_one_orchestrator.py index e92177001..e46ff46ff 100644 --- a/python/packages/autogen-agentchat/src/autogen_agentchat/teams/_group_chat/_magentic_one/_magentic_one_orchestrator.py +++ b/python/packages/autogen-agentchat/src/autogen_agentchat/teams/_group_chat/_magentic_one/_magentic_one_orchestrator.py @@ -47,6 +47,7 @@ from ._prompts import ( ORCHESTRATOR_TASK_LEDGER_FULL_PROMPT, ORCHESTRATOR_TASK_LEDGER_PLAN_PROMPT, ORCHESTRATOR_TASK_LEDGER_PLAN_UPDATE_PROMPT, + LedgerEntry, ) trace_logger = logging.getLogger(TRACE_LOGGER_NAME) @@ -309,7 +310,18 @@ class MagenticOneOrchestrator(BaseGroupChatManager): assert self._max_json_retries > 0 key_error: bool = False for _ in range(self._max_json_retries): - response = await self._model_client.create(self._get_compatible_context(context), json_output=True) + if self._model_client.model_info.get("structured_output", False): + response = await self._model_client.create( + self._get_compatible_context(context), cancellation_token=cancellation_token, json_output=LedgerEntry + ) + elif self._model_client.model_info.get("json_output", False): + response = await self._model_client.create( + self._get_compatible_context(context), cancellation_token=cancellation_token, json_output=True + ) + else: + response = await self._model_client.create( + self._get_compatible_context(context), cancellation_token=cancellation_token + ) ledger_str = response.content try: assert isinstance(ledger_str, str) diff --git a/python/packages/autogen-agentchat/src/autogen_agentchat/teams/_group_chat/_magentic_one/_prompts.py b/python/packages/autogen-agentchat/src/autogen_agentchat/teams/_group_chat/_magentic_one/_prompts.py index bc3f1b20e..846d06999 100644 --- a/python/packages/autogen-agentchat/src/autogen_agentchat/teams/_group_chat/_magentic_one/_prompts.py +++ 
b/python/packages/autogen-agentchat/src/autogen_agentchat/teams/_group_chat/_magentic_one/_prompts.py @@ -1,3 +1,5 @@ +from pydantic import BaseModel + ORCHESTRATOR_SYSTEM_MESSAGE = "" @@ -98,6 +100,24 @@ Please output an answer in pure JSON format according to the following schema. T """ +class LedgerEntryBooleanAnswer(BaseModel): + reason: str + answer: bool + + +class LedgerEntryStringAnswer(BaseModel): + reason: str + answer: str + + +class LedgerEntry(BaseModel): + is_request_satisfied: LedgerEntryBooleanAnswer + is_in_loop: LedgerEntryBooleanAnswer + is_progress_being_made: LedgerEntryBooleanAnswer + next_speaker: LedgerEntryStringAnswer + instruction_or_question: LedgerEntryStringAnswer + + ORCHESTRATOR_TASK_LEDGER_FACTS_UPDATE_PROMPT = """As a reminder, we are working to solve the following task: {task}