mirror of
https://github.com/microsoft/autogen.git
synced 2025-08-03 06:12:22 +00:00

64 lines
1.9 KiB
Python
import autogen
import pytest
import sys
from test_assistant_agent import KEY_LOC, OAI_CONFIG_LIST

try:
    from openai import OpenAI
except ImportError:
    skip = True
else:
    skip = False


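# Only run when the openai v1 client is importable and the interpreter is Python 3.10.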
@pytest.mark.skipif(
    skip or not sys.version.startswith("3.10"),
    reason="do not run if openai is not installed or py!=3.10",
)
def test_function_call_groupchat():
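    """Group chat with function calling: the assistant requests `get_random_number`
    and the user proxy executes it, until both an even and an odd number appear."""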
    import random

    def get_random_number():
        return random.randint(0, 100)

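    # Load GPT-4 endpoints from OAI_CONFIG_LIST, keeping only the model names listed below.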
    config_list_gpt4 = autogen.config_list_from_json(
        OAI_CONFIG_LIST,
        filter_dict={
            "model": ["gpt-4", "gpt-4-0314", "gpt4", "gpt-4-32k", "gpt-4-32k-0314", "gpt-4-32k-v0314"],
        },
        file_location=KEY_LOC,
    )
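    # "functions" advertises the callable's JSON schema to the model; "seed" seeds autogen's completion cache.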
    llm_config = {
        "config_list": config_list_gpt4,
        "seed": 42,
        "functions": [
            {
                "name": "get_random_number",
                "description": "Get a random number between 0 and 100",
                "parameters": {
                    "type": "object",
                    "properties": {},
                },
            },
        ],
    }
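    # The user proxy executes any requested function calls through its function_map, without human input.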
    user_proxy = autogen.UserProxyAgent(
        name="User_proxy",
        system_message="A human admin that will execute function_calls.",
        function_map={"get_random_number": get_random_number},
        human_input_mode="NEVER",
    )
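    # The assistant ("Player") decides when to call the function, guided by its system message.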
    coder = autogen.AssistantAgent(
        name="Player",
        system_message="You can call the function `get_random_number` to get a random number. Stop only when you get at least 1 even number and 1 odd number. Reply TERMINATE to stop.",
        llm_config=llm_config,
    )
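    # The manager routes messages between the two agents for at most 7 rounds.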
    groupchat = autogen.GroupChat(agents=[user_proxy, coder], messages=[], max_round=7)
    manager = autogen.GroupChatManager(groupchat=groupchat, llm_config=llm_config)

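    # Kick off the conversation; the manager relays messages within the group chat.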
    user_proxy.initiate_chat(manager, message="Let's start the game!")


if __name__ == "__main__":
    test_function_call_groupchat()