# AutoGen testbed scenario script: runs an assistant / user-proxy pair
# against a templated task prompt and records the results.
import os
import json

import autogen
import testbed_utils

# Set up the testbed harness (project-local; presumably prepares the run
# directory and logging — confirm against testbed_utils).
testbed_utils.init()


##############################
# Load the LLM endpoint configuration from the standard AutoGen location:
# the OAI_CONFIG_LIST environment variable or a file of the same name.
config_list = autogen.config_list_from_json("OAI_CONFIG_LIST")
# The LLM-backed coding assistant. The chat terminates when a message's
# content contains the literal "TERMINATE" (checked after stripping
# trailing whitespace).
assistant = autogen.AssistantAgent(
    "assistant",
    is_termination_msg=lambda x: x.get("content", "").rstrip().find("TERMINATE") >= 0,
    # Per-call timeout of 180s on top of the testbed's default LLM config.
    llm_config=testbed_utils.default_llm_config(config_list, timeout=180),
)
# The user proxy executes code on the assistant's behalf, fully
# unattended (no human input), for at most 10 consecutive auto-replies.
# It uses the same "TERMINATE" sentinel as the assistant to stop.
user_proxy = autogen.UserProxyAgent(
    "user_proxy",
    human_input_mode="NEVER",
    is_termination_msg=lambda x: x.get("content", "").rstrip().find("TERMINATE") >= 0,
    code_execution_config={
        "work_dir": "coding",
        # NOTE(review): code runs directly on the host, not in Docker.
        "use_docker": False,
    },
    max_consecutive_auto_reply=10,
    # Empty default reply: send nothing when there is no code to run.
    default_auto_reply="",
)
# Start the conversation. __PROMPT__ is a placeholder substituted by the
# testbed templating before this script is executed — TODO confirm
# against the scenario runner.
user_proxy.initiate_chat(
    assistant,
    message="""
__PROMPT__
""".strip(),
)
##############################
# Persist transcripts/results for both agents via the testbed harness.
testbed_utils.finalize(agents=[assistant, user_proxy])