Mirror of https://github.com/microsoft/autogen.git
Ensure 'name' on initial message (#2635)
* Update to ensure name on initial messages
* Corrected test cases for messages now including names.
* Added name to messages within select speaker nested chat
* Corrected select speaker group chat tests for name field

Co-authored-by: Chi Wang <wang.chi@microsoft.com>
This commit is contained in:
parent a638a08f81
commit 77ae3c09a5
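The diff below threads a new `is_sending` flag through `ConversableAgent._append_oai_message` so that every stored OpenAI-format message carries a `name`: the agent's own name when it appends on the sending side, and the sending peer's name when it appends on the receiving side. As a rough illustration of the intended effect (not part of the commit; assumes pyautogen v0.2.x with LLM calls disabled):

```python
from autogen import ConversableAgent

# Two offline agents; llm_config=False means no model is ever called (assumed setup).
alice = ConversableAgent(name="alice", llm_config=False, human_input_mode="NEVER")
bob = ConversableAgent(name="bob", llm_config=False, human_input_mode="NEVER")

# alice composes the initial message; request_reply=False keeps bob from replying.
alice.send({"content": "hello"}, bob, request_reply=False)

# Sender side (is_sending=True): the name comes from the sending agent itself.
print(alice.chat_messages[bob][-1])
# expected: {'content': 'hello', 'role': 'assistant', 'name': 'alice'}

# Receiver side (is_sending=False): the name comes from the peer that sent it.
print(bob.chat_messages[alice][-1])
# expected: {'content': 'hello', 'role': 'user', 'name': 'alice'}
```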
@@ -621,7 +621,7 @@ class ConversableAgent(LLMAgent):
             raise ValueError(f"Invalid name: {name}. Name must be less than 64 characters.")
         return name
 
-    def _append_oai_message(self, message: Union[Dict, str], role, conversation_id: Agent) -> bool:
+    def _append_oai_message(self, message: Union[Dict, str], role, conversation_id: Agent, is_sending: bool) -> bool:
         """Append a message to the ChatCompletion conversation.
 
         If the message received is a string, it will be put in the "content" field of the new dictionary.
@@ -633,6 +633,7 @@ class ConversableAgent(LLMAgent):
             message (dict or str): message to be appended to the ChatCompletion conversation.
             role (str): role of the message, can be "assistant" or "function".
             conversation_id (Agent): id of the conversation, should be the recipient or sender.
+            is_sending (bool): If the agent (aka self) is sending to the conversation_id agent, otherwise receiving.
 
         Returns:
             bool: whether the message is appended to the ChatCompletion conversation.
@@ -662,7 +663,15 @@ class ConversableAgent(LLMAgent):
 
         if oai_message.get("function_call", False) or oai_message.get("tool_calls", False):
             oai_message["role"] = "assistant"  # only messages with role 'assistant' can have a function call.
+        elif "name" not in oai_message:
+            # If we don't have a name field, append it
+            if is_sending:
+                oai_message["name"] = self.name
+            else:
+                oai_message["name"] = conversation_id.name
+
         self._oai_messages[conversation_id].append(oai_message)
+
         return True
 
     def _process_message_before_send(
@@ -718,7 +727,7 @@ class ConversableAgent(LLMAgent):
         message = self._process_message_before_send(message, recipient, ConversableAgent._is_silent(self, silent))
         # When the agent composes and sends the message, the role of the message is "assistant"
         # unless it's "function".
-        valid = self._append_oai_message(message, "assistant", recipient)
+        valid = self._append_oai_message(message, "assistant", recipient, is_sending=True)
         if valid:
             recipient.receive(message, self, request_reply, silent)
         else:
@@ -768,7 +777,7 @@ class ConversableAgent(LLMAgent):
         message = self._process_message_before_send(message, recipient, ConversableAgent._is_silent(self, silent))
         # When the agent composes and sends the message, the role of the message is "assistant"
         # unless it's "function".
-        valid = self._append_oai_message(message, "assistant", recipient)
+        valid = self._append_oai_message(message, "assistant", recipient, is_sending=True)
         if valid:
             await recipient.a_receive(message, self, request_reply, silent)
         else:
@@ -839,7 +848,7 @@ class ConversableAgent(LLMAgent):
 
     def _process_received_message(self, message: Union[Dict, str], sender: Agent, silent: bool):
         # When the agent receives a message, the role of the message is "user". (If 'role' exists and is 'function', it will remain unchanged.)
-        valid = self._append_oai_message(message, "user", sender)
+        valid = self._append_oai_message(message, "user", sender, is_sending=False)
         if logging_enabled():
             log_event(self, "received_message", message=message, sender=sender.name, valid=valid)
 
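Condensed, the new rule in `_append_oai_message` is: never overwrite an explicit `name`, and otherwise attribute the message to its author, which is `self` when sending and the `conversation_id` peer when receiving (messages carrying `function_call`/`tool_calls` are handled as before). A standalone paraphrase of just that branch (hypothetical helper, not the committed code):

```python
from typing import Dict


def resolve_name(oai_message: Dict, self_name: str, peer_name: str, is_sending: bool) -> str:
    """Hypothetical helper mirroring the new elif branch above (tool/function calls ignored)."""
    if "name" in oai_message:
        # An explicit name, e.g. a relayed third-party speaker, is left as-is.
        return oai_message["name"]
    # Otherwise the author is this agent when sending, the peer when receiving.
    return self_name if is_sending else peer_name


assert resolve_name({"content": "hi"}, "alice", "bob", is_sending=True) == "alice"
assert resolve_name({"content": "hi"}, "alice", "bob", is_sending=False) == "bob"
assert resolve_name({"content": "hi", "name": "sam"}, "alice", "bob", is_sending=True) == "sam"
```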
@@ -649,6 +649,7 @@ class GroupChat:
         if self.select_speaker_prompt_template is not None:
             start_message = {
                 "content": self.select_speaker_prompt(agents),
+                "name": "checking_agent",
                 "override_role": self.role_for_select_speaker_messages,
             }
         else:
@@ -813,6 +814,7 @@ class GroupChat:
 
                 return True, {
                     "content": self.select_speaker_auto_multiple_template.format(agentlist=agentlist),
+                    "name": "checking_agent",
                     "override_role": self.role_for_select_speaker_messages,
                 }
             else:
@@ -842,6 +844,7 @@ class GroupChat:
 
                 return True, {
                     "content": self.select_speaker_auto_none_template.format(agentlist=agentlist),
+                    "name": "checking_agent",
                     "override_role": self.role_for_select_speaker_messages,
                 }
             else:
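In the GroupChat hunks, the synthetic prompts fed into the nested speaker-selection chat are now tagged with the checking agent's name, `checking_agent`, alongside the existing `override_role`. Roughly, those messages take this shape (illustrative placeholders only; the real content comes from `select_speaker_prompt()` and the `select_speaker_auto_*` templates):

```python
# Shape of a speaker-selection message after this commit; values are placeholders.
role_for_select_speaker_messages = "system"  # the GroupChat default, per the tests below

start_message = {
    "content": "Select the next speaker from ['Alice', 'Bob'].",  # placeholder prompt text
    "name": "checking_agent",                                      # added by this commit
    "override_role": role_for_select_speaker_messages,             # consumed by _append_oai_message
}
```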
@@ -724,7 +724,7 @@ def test_clear_agents_history():
     agent1_history = list(agent1._oai_messages.values())[0]
     agent2_history = list(agent2._oai_messages.values())[0]
     assert agent1_history == [
-        {"content": "hello", "role": "assistant"},
+        {"content": "hello", "role": "assistant", "name": "alice"},
         {"content": "This is bob speaking.", "name": "bob", "role": "user"},
         {"content": "How you doing?", "name": "sam", "role": "user"},
     ]
@@ -745,7 +745,7 @@ def test_clear_agents_history():
         {"content": "How you doing?", "name": "sam", "role": "user"},
     ]
     assert agent2_history == [
-        {"content": "This is bob speaking.", "role": "assistant"},
+        {"content": "This is bob speaking.", "role": "assistant", "name": "bob"},
         {"content": "How you doing?", "name": "sam", "role": "user"},
     ]
     assert groupchat.messages == [
@@ -759,12 +759,12 @@ def test_clear_agents_history():
     agent1_history = list(agent1._oai_messages.values())[0]
     agent2_history = list(agent2._oai_messages.values())[0]
     assert agent1_history == [
-        {"content": "hello", "role": "assistant"},
+        {"content": "hello", "role": "assistant", "name": "alice"},
         {"content": "This is bob speaking.", "name": "bob", "role": "user"},
         {"content": "How you doing?", "name": "sam", "role": "user"},
     ]
     assert agent2_history == [
-        {"content": "This is bob speaking.", "role": "assistant"},
+        {"content": "This is bob speaking.", "role": "assistant", "name": "bob"},
         {"content": "How you doing?", "name": "sam", "role": "user"},
     ]
     assert groupchat.messages == [
@@ -822,6 +822,7 @@ def test_clear_agents_history():
             "content": "example tool response",
             "tool_responses": [{"tool_call_id": "call_emulated", "role": "tool", "content": "example tool response"}],
             "role": "tool",
+            "name": "alice",
         },
     ]
 
@@ -1218,7 +1219,7 @@ def test_role_for_select_speaker_messages():
     # into a message attribute called 'override_role'. This is evaluated in Conversable Agent's _append_oai_message function
     # e.g.: message={'content':self.select_speaker_prompt(agents),'override_role':self.role_for_select_speaker_messages},
     message = {"content": "A prompt goes here.", "override_role": groupchat.role_for_select_speaker_messages}
-    checking_agent._append_oai_message(message, "assistant", speaker_selection_agent)
+    checking_agent._append_oai_message(message, "assistant", speaker_selection_agent, is_sending=True)
 
     # Test default is "system"
     assert len(checking_agent.chat_messages) == 1
@@ -1227,7 +1228,7 @@ def test_role_for_select_speaker_messages():
     # Test as "user"
     groupchat.role_for_select_speaker_messages = "user"
     message = {"content": "A prompt goes here.", "override_role": groupchat.role_for_select_speaker_messages}
-    checking_agent._append_oai_message(message, "assistant", speaker_selection_agent)
+    checking_agent._append_oai_message(message, "assistant", speaker_selection_agent, is_sending=True)
 
     assert len(checking_agent.chat_messages) == 1
     assert checking_agent.chat_messages[speaker_selection_agent][-1]["role"] == "user"
@@ -1235,7 +1236,7 @@ def test_role_for_select_speaker_messages():
     # Test as something unusual
     groupchat.role_for_select_speaker_messages = "SockS"
     message = {"content": "A prompt goes here.", "override_role": groupchat.role_for_select_speaker_messages}
-    checking_agent._append_oai_message(message, "assistant", speaker_selection_agent)
+    checking_agent._append_oai_message(message, "assistant", speaker_selection_agent, is_sending=True)
 
     assert len(checking_agent.chat_messages) == 1
     assert checking_agent.chat_messages[speaker_selection_agent][-1]["role"] == "SockS"
@@ -1646,6 +1647,7 @@ def test_speaker_selection_validate_speaker_name():
         True,
         {
             "content": groupchat.select_speaker_auto_multiple_template.format(agentlist=agent_list_string),
+            "name": "checking_agent",
             "override_role": groupchat.role_for_select_speaker_messages,
         },
     )
@@ -1692,6 +1694,7 @@ def test_speaker_selection_validate_speaker_name():
         True,
        {
             "content": groupchat.select_speaker_auto_none_template.format(agentlist=agent_list_string),
+            "name": "checking_agent",
             "override_role": groupchat.role_for_select_speaker_messages,
         },
     )
@@ -1761,6 +1764,7 @@ def test_select_speaker_auto_messages():
         True,
         {
             "content": custom_multiple_names_msg.replace("{agentlist}", "['Alice', 'Bob']"),
+            "name": "checking_agent",
             "override_role": groupchat.role_for_select_speaker_messages,
         },
     )
@@ -1770,6 +1774,7 @@ def test_select_speaker_auto_messages():
         True,
         {
             "content": custom_no_names_msg.replace("{agentlist}", "['Alice', 'Bob']"),
+            "name": "checking_agent",
             "override_role": groupchat.role_for_select_speaker_messages,
         },
     )
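Because the guard is `elif "name" not in oai_message`, an explicit name is never overwritten, which is why the "sam" entries in the updated test histories keep their own name. A hedged sketch of that path, again assuming pyautogen v0.2.x with LLM calls disabled (not one of the repo's tests):

```python
from autogen import ConversableAgent

bob = ConversableAgent(name="bob", llm_config=False, human_input_mode="NEVER")
carol = ConversableAgent(name="carol", llm_config=False, human_input_mode="NEVER")

# A message relayed on behalf of a third speaker keeps its explicit name.
bob.send({"content": "How you doing?", "name": "sam"}, carol, request_reply=False)

print(bob.chat_messages[carol][-1])
# expected: {'content': 'How you doing?', 'name': 'sam', 'role': 'assistant'}
print(carol.chat_messages[bob][-1])
# expected: {'content': 'How you doing?', 'name': 'sam', 'role': 'user'}
```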