import argparse
import asyncio
import json
import logging
import os
import sys

from agnext.application import SingleThreadedAgentRuntime
from agnext.components import DefaultTopicId, RoutedAgent, message_handler
from agnext.components._default_subscription import DefaultSubscription
from agnext.components.memory import ChatMemory
from agnext.components.models import ChatCompletionClient, SystemMessage
from agnext.core import AgentId, AgentInstantiationContext, AgentProxy, AgentRuntime

# Make the sibling utils module and the shared common package importable.
sys.path.append(os.path.abspath(os.path.dirname(__file__)))
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))

from agnext.core import MessageContext
from common.memory import BufferedChatMemory
from common.types import Message, TextMessage
from common.utils import convert_messages_to_llm_messages, get_chat_completion_client_from_envs
from utils import TextualChatApp, TextualUserAgent


# Define a custom agent that can handle chat room messages.
class ChatRoomAgent(RoutedAgent):
    def __init__(
        self,
        name: str,
        description: str,
        background_story: str,
        memory: ChatMemory[Message],
        model_client: ChatCompletionClient,
    ) -> None:
        super().__init__(description)
        system_prompt = f"""Your name is {name}.
Your background story is:
{background_story}

Now you are in a chat room with other users.
You can send messages to the chat room by typing your message below.
You do not need to respond to every message.
Use the following JSON format to provide your thought on the latest message and choose whether to respond:
{{
    "thought": "Your thought on the message",
    "respond": <true/false>,
    "response": "Your response to the message or None if you choose not to respond."
}}
"""
        self._system_messages = [SystemMessage(system_prompt)]
        self._memory = memory
        self._client = model_client
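
    # For illustration, a hypothetical model reply following the JSON format
    # requested above might look like:
    #   {"thought": "A question about testing came up.", "respond": true, "response": "I usually start with pytest."}
    # or, when the agent chooses not to respond:
    #   {"thought": "Nothing to add here.", "respond": false, "response": "None"}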

    @message_handler()
    async def on_chat_room_message(self, message: TextMessage, ctx: MessageContext) -> None:
        # Save the message to memory as structured JSON.
        from_message = TextMessage(
            content=json.dumps({"sender": message.source, "content": message.content}), source=message.source
        )
        await self._memory.add_message(from_message)

        # Get a response from the model.
        raw_response = await self._client.create(
            self._system_messages
            + convert_messages_to_llm_messages(await self._memory.get_messages(), self_name=self.metadata["type"]),
            json_output=True,
        )
        assert isinstance(raw_response.content, str)

        # Save the response to memory.
        await self._memory.add_message(TextMessage(source=self.metadata["type"], content=raw_response.content))

        # Parse the response.
        data = json.loads(raw_response.content)
        respond = data.get("respond")
        response = data.get("response")

        # Publish the response if needed.
        if respond is True or str(respond).lower().strip() == "true":
            await self.publish_message(
                TextMessage(source=self.metadata["type"], content=str(response)), topic_id=DefaultTopicId()
            )


class ChatRoomUserAgent(TextualUserAgent):
    """An agent that is used to receive messages from the runtime."""

    @message_handler
    async def on_chat_room_message(self, message: TextMessage, ctx: MessageContext) -> None:
        # Forward chat room messages from the runtime to the Textual app for display.
        await self._app.post_runtime_message(message)


# Define a chat room with participants -- the runtime is the chat room.
async def chat_room(runtime: AgentRuntime, app: TextualChatApp) -> None:
    # Every agent is registered with a DefaultSubscription, so each one receives
    # the messages published to the default topic, i.e. every chat room message.
    await runtime.register(
        "User",
        lambda: ChatRoomUserAgent(
            description="The user in the chat room.",
            app=app,
        ),
        lambda: [DefaultSubscription()],
    )
    await runtime.register(
        "Alice",
        lambda: ChatRoomAgent(
            name=AgentInstantiationContext.current_agent_id().type,
            description="Alice in the chat room.",
            background_story="Alice is a software engineer who loves to code.",
            memory=BufferedChatMemory(buffer_size=10),
            model_client=get_chat_completion_client_from_envs(model="gpt-4-turbo"),
        ),
        lambda: [DefaultSubscription()],
    )
    alice = AgentProxy(AgentId("Alice", "default"), runtime)
    await runtime.register(
        "Bob",
        lambda: ChatRoomAgent(
            name=AgentInstantiationContext.current_agent_id().type,
            description="Bob in the chat room.",
            background_story="Bob is a data scientist who loves to analyze data.",
            memory=BufferedChatMemory(buffer_size=10),
            model_client=get_chat_completion_client_from_envs(model="gpt-4-turbo"),
        ),
        lambda: [DefaultSubscription()],
    )
    bob = AgentProxy(AgentId("Bob", "default"), runtime)
    await runtime.register(
        "Charlie",
        lambda: ChatRoomAgent(
            name=AgentInstantiationContext.current_agent_id().type,
            description="Charlie in the chat room.",
            background_story="Charlie is a designer who loves to create art.",
            memory=BufferedChatMemory(buffer_size=10),
            model_client=get_chat_completion_client_from_envs(model="gpt-4-turbo"),
        ),
        lambda: [DefaultSubscription()],
    )
    charlie = AgentProxy(AgentId("Charlie", "default"), runtime)
    app.welcoming_notice = f"""Welcome to the chat room demo with the following participants:
1. 👧 {alice.id.type}: {(await alice.metadata)['description']}
2. 👱🏼‍♂️ {bob.id.type}: {(await bob.metadata)['description']}
3. 👨🏾‍🦳 {charlie.id.type}: {(await charlie.metadata)['description']}

Each participant decides on its own whether to respond to the latest message.

You can greet the chat room by typing your first message below.
"""


async def main() -> None:
    runtime = SingleThreadedAgentRuntime()
    app = TextualChatApp(runtime, user_name="You")
    await chat_room(runtime, app)
    # Start the runtime's message processing, then hand control to the Textual UI.
    runtime.start()
    await app.run_async()


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Chat room demo with self-driving AI agents.")
    parser.add_argument("--verbose", action="store_true", help="Enable verbose logging.")
    args = parser.parse_args()
    if args.verbose:
        logging.basicConfig(level=logging.WARNING)
        logging.getLogger("agnext").setLevel(logging.DEBUG)
        handler = logging.FileHandler("chat_room.log")
        logging.getLogger("agnext").addHandler(handler)
    asyncio.run(main())
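
# A minimal way to try the demo (a sketch; assumes this file is saved as chat_room.py
# and the model credentials read by get_chat_completion_client_from_envs are set in
# the environment):
#
#   python chat_room.py            # run the Textual chat room UI
#   python chat_room.py --verbose  # also write agnext debug logs to chat_room.log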