2024-02-28 17:11:08 -08:00
#!/usr/bin/env python3 -m pytest
2023-08-07 18:34:47 -07:00
import asyncio
2024-01-05 17:24:49 +03:00
import os
2024-04-05 10:26:06 +08:00
import sys
import pytest
from test_assistant_agent import KEY_LOC , OAI_CONFIG_LIST
import autogen
2024-01-05 17:24:49 +03:00
sys . path . append ( os . path . join ( os . path . dirname ( __file__ ) , " .. " ) )
2024-04-17 13:10:18 -07:00
from conftest import reason , skip_openai # noqa: E402
2023-12-31 22:37:21 +03:00
2023-08-07 18:34:47 -07:00
def get_market_news(ind, ind_upper):
    """Return a newline-joined digest of canned market-news articles.

    Simulates a market-data feed for the streaming test: selects the slice
    ``[ind:ind_upper]`` of a fixed in-memory article list and renders each
    article as one "News summary: ..." line.

    Args:
        ind: Start index of the slice into the canned feed.
        ind_upper: End index (exclusive) of the slice.

    Returns:
        str: One line per selected article, joined with newlines; the empty
        string when the slice is empty.
    """
    articles = [
        {
            "title": "Palantir CEO Says Our Generation's Atomic Bomb Could Be AI Weapon - And Arrive Sooner Than You Think - Palantir Technologies (NYSE:PLTR)",
            "summary": 'Christopher Nolan\'s blockbuster movie "Oppenheimer" has reignited the public discourse surrounding the United States\' use of an atomic bomb on Japan at the end of World War II.',
            "overall_sentiment_score": 0.009687,
        },
        {
            "title": '3 "Hedge Fund Hotels" Pulling into Support',
            "summary": "Institutional quality stocks have several benefits including high-liquidity, low beta, and a long runway. Strategist Andrew Rocco breaks down what investors should look for and pitches 3 ideas.",
            "banner_image": "https://staticx-tuner.zacks.com/images/articles/main/92/87.jpg",
            "overall_sentiment_score": 0.219747,
        },
        {
            "title": "PDFgear, Bringing a Completely-Free PDF Text Editing Feature",
            "summary": "LOS ANGELES, July 26, 2023 /PRNewswire/ -- PDFgear, a leading provider of PDF solutions, announced a piece of exciting news for everyone who works extensively with PDF documents.",
            "overall_sentiment_score": 0.360071,
        },
        {
            "title": "Researchers Pitch 'Immunizing' Images Against Deepfake Manipulation",
            "summary": "A team at MIT says injecting tiny disruptive bits of code can cause distorted deepfake images.",
            "overall_sentiment_score": -0.026894,
        },
        {
            "title": "Nvidia wins again - plus two more takeaways from this week's mega-cap earnings",
            "summary": "We made some key conclusions combing through quarterly results for Microsoft and Alphabet and listening to their conference calls with investors.",
            "overall_sentiment_score": 0.235177,
        },
    ]
    selected = articles[ind:ind_upper]
    lines = []
    for item in selected:
        lines.append(
            f"News summary: {item['title']}. {item['summary']} overall_sentiment_score: {item['overall_sentiment_score']}"
        )
    return "\n".join(lines)
2024-04-17 13:10:18 -07:00
@pytest.mark.skipif(skip_openai, reason=reason)
@pytest.mark.asyncio
async def test_async_groupchat():
    """Smoke-test an async two-agent round-robin group chat.

    Runs against a real gpt-3.5-turbo config (skipped when OpenAI access is
    disabled) and only asserts that some messages were exchanged.
    """
    config_list = autogen.config_list_from_json(OAI_CONFIG_LIST, KEY_LOC, filter_dict={"tags": ["gpt-3.5-turbo"]})

    # The assistant is instructed to emit TERMINATE when it is finished.
    assistant = autogen.AssistantAgent(
        name="assistant",
        llm_config={
            "config_list": config_list,
            "temperature": 0,
        },
        system_message="You are a helpful assistant. Reply 'TERMINATE' to end the conversation.",
    )
    # The proxy auto-replies with no human in the loop and never runs code.
    user_proxy = autogen.UserProxyAgent(
        name="user",
        human_input_mode="NEVER",
        max_consecutive_auto_reply=5,
        code_execution_config=False,
        default_auto_reply=None,
    )

    groupchat = autogen.GroupChat(
        agents=[user_proxy, assistant],
        messages=[],
        max_round=3,
        speaker_selection_method="round_robin",
    )
    manager = autogen.GroupChatManager(
        groupchat=groupchat,
        is_termination_msg=lambda msg: "TERMINATE" in msg.get("content", ""),
    )

    await user_proxy.a_initiate_chat(manager, message="""223434*3422=?.""")

    assert len(user_proxy.chat_messages) > 0
2024-04-17 13:10:18 -07:00
@pytest.mark.skipif(skip_openai, reason=reason)
@pytest.mark.asyncio
async def test_stream():
    """Exercise an async chat that is fed by a simulated live news stream.

    A background task appends market-news items into a shared Future; a
    custom reply hook on the user proxy injects any pending news into the
    conversation. Skipped when OpenAI access is disabled.
    """
    config_list = autogen.config_list_from_json(OAI_CONFIG_LIST, KEY_LOC, filter_dict={"tags": ["gpt-3.5-turbo"]})

    # Shared handle for the simulated stream: resolved to a list of news
    # strings on the first item, then appended to in place.
    news_future = asyncio.Future()

    async def feed_market_news():
        # Simulate a slow data stream delivering one article at a time.
        for step in range(0, 2, 1):
            latest_news = get_market_news(step, step + 1)
            if news_future.done():
                news_future.result().append(latest_news)
            else:
                news_future.set_result([latest_news])
            # print(news_future.result())
            await asyncio.sleep(5)

    feed_task = asyncio.create_task(feed_market_news())

    # Assistant configured as a financial expert with a fixed cache seed.
    assistant = autogen.AssistantAgent(
        name="assistant",
        llm_config={
            "timeout": 600,
            "cache_seed": 41,
            "config_list": config_list,
            "temperature": 0,
        },
        system_message="You are a financial expert.",
    )
    # Proxy auto-replies with no human in the loop and never runs code.
    user_proxy = autogen.UserProxyAgent(
        name="user",
        human_input_mode="NEVER",
        max_consecutive_auto_reply=5,
        code_execution_config=False,
        default_auto_reply=None,
    )

    async def add_data_reply(recipient, messages, sender, config):
        # Reply hook: if fresh news has arrived, drain it into the chat.
        await asyncio.sleep(0.1)
        feed = config["news_stream"]
        if not feed.done():
            return False, None
        pending = feed.result()
        if not pending:
            return False, None
        news_str = "\n".join(pending)
        pending.clear()
        return (
            True,
            f"Just got some latest market news. Merge your new suggestion with previous ones.\n{news_str}",
        )

    user_proxy.register_reply(autogen.AssistantAgent, add_data_reply, position=2, config={"news_stream": news_future})

    chat_res = await user_proxy.a_initiate_chat(
        assistant, message="""Give me investment suggestion in 3 bullet points.""", summary_method="reflection_with_llm"
    )

    print("Chat summary:", chat_res.summary)
    print("Chat cost:", chat_res.cost)

    # Keep the conversation going until the simulated stream is exhausted.
    while not feed_task.done() and not feed_task.cancelled():
        reply = await user_proxy.a_generate_reply(sender=assistant)
        if reply is not None:
            await user_proxy.a_send(reply, assistant)
    # print("Chat summary and cost:", res.summary, res.cost)
2023-08-07 18:34:47 -07:00
if __name__ == "__main__":
    # Run the group-chat smoke test when executed as a script; swap the
    # commented line in to debug the streaming test instead.
    # asyncio.run(test_stream())
    asyncio.run(test_async_groupchat())