autogen/dotnet/test/AutoGen.Tests/SingleAgentTest.cs
Xiaoyun Zhang 600bd3f2fe
Bring Dotnet AutoGen (#924)
* update readme

* update

* update

* update

* update

* update

* update

* add sample project

* revert notebook change back

* update

* update interactive version

* add nuget package

* refactor Message

* update example

* add azure nightly build pipeline

* Set up CI with Azure Pipelines

[skip ci]

* Update nightly-build.yml for Azure Pipelines

* add dotnet interactive package

* add dotnet interactive package

* update pipeline

* add nuget feed back

* remove dotnet-tool feed

* remove dotnet-tool feed comment

* update pipeline

* update build name

* Update nightly-build.yml

* Delete .github/workflows/dotnet-ci.yml

* update

* add working_dir to use step

* add initiateChat api

* update oai package

* Update dotnet-build.yml

* Update dotnet-run-openai-test-and-notebooks.yml

* update build workflow

* update build workflow

* update nuget feed

* update nuget feed

* update aoai and sk version

* Update InteractiveService.cs

* add support for GPT 4V

* add DalleAndGPT4V example

* update example

* add user proxy agent

* add readme

* bump version

* update example

* add dotnet interactive hook

* update

* update tests

* add website

* update index.md

* add docs

* update doc

* move sk dependency out of core package

* udpate doc

* Update Use-function-call.md

* add type safe function call document

* update doc

* update doc

* add dock

* Update Use-function-call.md

* add GenerateReplyOptions

* remove IChatLLM

* update version

* update doc

* update website

* add sample

* fix link

* add middleware agent

* clean up doc

* bump version

* update doc

* update

* add Other Language

* remove warnings

* add sign.props

* add sign step

* fix pipeline

* auth

* real sign

* disable PR trigger

* update

* disable PR trigger

* use microbuild machine

* update build pipeline to add publish to internal feed

* add internal feed

* fix build pipeline

* add dotnet prefix

* update ci

* add build number

* update run number

* update source

* update token

* update

* remove adding source

* add publish to github package

* try again

* try again

* ask for write package

* disable package when branch is not main

* update

* implement streaming agent

* add test for streaming function call

* update

* fix #1588

* enable PR check for dotnet branch

* add website readme

* only publish to dotnet feed when pushing to dotnet branch

* remove openai-test-and-notebooks workflow

* update readme

* update readme

* update workflow

* update getting-start

* upgrade test and sample project to use .net 8

* fix global.json format && make loadFromConfig API internal only before implementing

* update

* add support for LM studio

* add doc

* Update README.md

* add push and workflow_dispatch trigger

* disable PR for main

* add dotnet env

* Update Installation.md

* add nuget

* refer to newtonsoft 13

* update branch to dotnet in docfx

* Update Installation.md

* pull out HumanInputMiddleware and FunctionCallMiddleware

* fix tests

* add link to sample folder

* refactor message

* refactor over IMessage

* add more tests

* add more test

* fix build error

* rename header

* add semantic kernel project

* update sk example

* update dotnet version

* add LMStudio function call example

* rename LLaMAFunction

* remove dotnet run openai test and notebook workflow

* add FunctionContract and test

* update doc

* add documents

* add workflow

* update

* update sample

* fix warning in test

* result length can be less than maximumOutputToKeep (#1804)

* merge with main

* add option to retrieve inner agent and middlewares from MiddlewareAgent

* update doc

* adjust namespace

* update readme

* fix test

* use IMessage

* more updates

* update

* fix test

* add comments

* use FunctionContract to replace FunctionDefinition

* move AutoGen contrac to AutoGen.Core

* update installation

* refactor streamingAgent by adding StreamingMessage type

* update sample

* update samples

* update

* update

* add test

* fix test

* bump version

* add openaichat test

* update

* Update Example03_Agent_FunctionCall.cs

* [.Net] improve docs (#1862)

* add doc

* add doc

* add doc

* add doc

* add doc

* add doc

* update

* fix test error

* fix some error

* fix test

* fix test

* add more tests

* edits

---------

Co-authored-by: ekzhu <ekzhu@users.noreply.github.com>

* [.Net] Add fill form example (#1911)

* add form filler example

* update

* fix ci error

* [.Net] Add using AutoGen.Core in source generator (#1983)

* fix using namespace bug in source generator

* remove using in sourcegenerator test

* disable PR test

* Add .idea to .gitignore (#1988)

* [.Net] publish to nuget.org feed (#1987)

* publish to nuget

* update ci

* update dotnet-release

* update release pipeline

* add source

* remove empty symbol package

* update pipeline

* remove tag

* update installation guide

* [.Net] Rename some classes && APIs based on doc review (#1980)

* rename sequential group chat to round robin group chat

* rename to sendInstruction

* rename workflow to graph

* rename some api

* bump version

* move Graph to GroupChat folder

* rename fill application example

* [.Net] Improve package description (#2161)

* add discord link and update package description

* Update getting-start.md

* [.Net] Fix document comment from the most recent AutoGen.Net engineer sync (#2231)

* update

* rename RegisterPrintMessageHook to RegisterPrintMessage

* update website

* update update.md

* fix link error

* [.Net] Enable JsonMode and deterministic output in AutoGen.OpenAI OpenAIChatAgent (#2347)

* update openai version && add sample for json output

* add example in web

* update update.md

* update image url

* [.Net] Add AutoGen.Mistral package (#2330)

* add mistral client

* enable streaming support

* add mistralClientAgent

* add test for function call

* add extension

* add support for toolcall and toolcall result message

* add support for aggregate message

* implement streaming function call

* track (#2471)

* [.Net] add mistral example (#2482)

* update existing examples to use messageConnector

* add overview

* add function call document

* add example 14

* add mistral token count usage example

* update version

* Update dotnet-release.yml (#2488)

* update

* revert gitattributes

---------

Co-authored-by: mhensen <mh@webvize.nl>
Co-authored-by: ekzhu <ekzhu@users.noreply.github.com>
Co-authored-by: Krzysztof Kasprowicz <60486987+Krzysztof318@users.noreply.github.com>
2024-04-26 16:21:46 +00:00

326 lines
13 KiB
C#

// Copyright (c) Microsoft Corporation. All rights reserved.
// SingleAgentTest.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using AutoGen.OpenAI;
using Azure.AI.OpenAI;
using FluentAssertions;
using Xunit;
using Xunit.Abstractions;
namespace AutoGen.Tests
{
/// <summary>
/// End-to-end tests for single-agent conversations against OpenAI / Azure OpenAI
/// backed agents (<c>GPTAgent</c> and <c>AssistantAgent</c>), covering plain chat,
/// vision input, function calling, self-executed function calls and streaming replies.
/// Tests marked with <c>ApiKeyFact</c> only run when the listed environment
/// variables are set. This is a partial class: the function contracts and wrappers
/// referenced below (e.g. <c>EchoAsyncFunctionContract</c>, <c>EchoAsyncWrapper</c>)
/// are produced by the AutoGen source generator in the other partial part.
/// </summary>
public partial class SingleAgentTest
{
    // xunit sink for diagnostic output from a test run; only set in the constructor.
    private readonly ITestOutputHelper _output;

    public SingleAgentTest(ITestOutputHelper output)
    {
        _output = output;
    }

    /// <summary>
    /// Builds an Azure OpenAI config targeting the "gpt-35-turbo-16k" deployment.
    /// </summary>
    /// <returns>An <see cref="ILLMConfig"/> backed by Azure OpenAI.</returns>
    /// <exception cref="ArgumentException">
    /// Thrown when AZURE_OPENAI_API_KEY or AZURE_OPENAI_ENDPOINT is not set.
    /// </exception>
    private ILLMConfig CreateAzureOpenAIGPT35TurboConfig()
    {
        var key = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY") ?? throw new ArgumentException("AZURE_OPENAI_API_KEY is not set");
        var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new ArgumentException("AZURE_OPENAI_ENDPOINT is not set");
        return new AzureOpenAIConfig(endpoint, "gpt-35-turbo-16k", key);
    }

    /// <summary>
    /// Builds an OpenAI config for the "gpt-4-vision-preview" model.
    /// </summary>
    /// <returns>An <see cref="ILLMConfig"/> backed by the public OpenAI endpoint.</returns>
    /// <exception cref="ArgumentException">Thrown when OPENAI_API_KEY is not set.</exception>
    private ILLMConfig CreateOpenAIGPT4VisionConfig()
    {
        var key = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new ArgumentException("OPENAI_API_KEY is not set");
        return new OpenAIConfig(key, "gpt-4-vision-preview");
    }

    /// <summary>
    /// Smoke test: a plain GPTAgent answers a simple request in both
    /// non-streaming and streaming mode.
    /// </summary>
    [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT")]
    public async Task GPTAgentTestAsync()
    {
        var config = this.CreateAzureOpenAIGPT35TurboConfig();

        var agent = new GPTAgent("gpt", "You are a helpful AI assistant", config);

        await UpperCaseTest(agent);
        await UpperCaseStreamingTestAsync(agent);
    }

    /// <summary>
    /// Sends an image question to a GPT-4V agent through three different message
    /// shapes (a raw OpenAI message envelope, a MultiModalMessage and a bare
    /// ImageMessage), then asks a GPT-3.5 agent to turn the vision reply into a
    /// <see cref="GetHighestLabel"/> tool call.
    /// </summary>
    [ApiKeyFact("OPENAI_API_KEY", "AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT")]
    public async Task GPTAgentVisionTestAsync()
    {
        var visionConfig = this.CreateOpenAIGPT4VisionConfig();
        var visionAgent = new GPTAgent(
            name: "gpt",
            systemMessage: "You are a helpful AI assistant",
            config: visionConfig,
            temperature: 0);

        var gpt3Config = this.CreateAzureOpenAIGPT35TurboConfig();
        var gpt3Agent = new GPTAgent(
            name: "gpt3",
            systemMessage: "You are a helpful AI assistant, return highest label from conversation",
            config: gpt3Config,
            temperature: 0,
            functions: new[] { this.GetHighestLabelFunction },
            functionMap: new Dictionary<string, Func<string, Task<string>>>
            {
                { nameof(GetHighestLabel), this.GetHighestLabelWrapper },
            });

        var imageUri = new Uri(@"https://microsoft.github.io/autogen/assets/images/level2algebra-659ba95286432d9945fc89e84d606797.png");
        var oaiMessage = new ChatRequestUserMessage(
            new ChatMessageTextContentItem("which label has the highest inference cost"),
            new ChatMessageImageContentItem(imageUri));
        var multiModalMessage = new MultiModalMessage(Role.User,
            [
                new TextMessage(Role.User, "which label has the highest inference cost", from: "user"),
                new ImageMessage(Role.User, imageUri, from: "user"),
            ],
            from: "user");
        var imageMessage = new ImageMessage(Role.User, imageUri, from: "user");

        // Three equivalent ways of asking the same question: all should yield a
        // vision reply that gpt3Agent then converts into a GetHighestLabel tool call.
        IMessage[] messages = [
            MessageEnvelope.Create(oaiMessage),
            multiModalMessage,
            imageMessage,
        ];
        foreach (var message in messages)
        {
            var response = await visionAgent.SendAsync(message);
            response.From.Should().Be(visionAgent.Name);

            var labelResponse = await gpt3Agent.SendAsync(response);
            labelResponse.From.Should().Be(gpt3Agent.Name);
            labelResponse.GetToolCalls()!.First().FunctionName.Should().Be(nameof(GetHighestLabel));
        }
    }

    /// <summary>
    /// A GPTAgent configured with the EchoAsync function contract (but no
    /// function map) should emit a tool call without executing it, and still
    /// answer unrelated requests normally.
    /// </summary>
    [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT")]
    public async Task GPTFunctionCallAgentTestAsync()
    {
        var config = this.CreateAzureOpenAIGPT35TurboConfig();
        var agentWithFunction = new GPTAgent("gpt", "You are a helpful AI assistant", config, 0, functions: new[] { this.EchoAsyncFunction });

        await EchoFunctionCallTestAsync(agentWithFunction);
        await UpperCaseTest(agentWithFunction);
    }

    /// <summary>
    /// Same scenario as <see cref="GPTFunctionCallAgentTestAsync"/>, but driven
    /// through AssistantAgent + ConversableAgentConfig instead of GPTAgent.
    /// </summary>
    [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT")]
    public async Task AssistantAgentFunctionCallTestAsync()
    {
        var config = this.CreateAzureOpenAIGPT35TurboConfig();
        var llmConfig = new ConversableAgentConfig
        {
            Temperature = 0,
            FunctionContracts = new[]
            {
                this.EchoAsyncFunctionContract,
            },
            ConfigList = new[]
            {
                config,
            },
        };

        var assistantAgent = new AssistantAgent(
            name: "assistant",
            llmConfig: llmConfig);

        await EchoFunctionCallTestAsync(assistantAgent);
        await UpperCaseTest(assistantAgent);
    }

    /// <summary>
    /// An AssistantAgent without an LLM config falls back to its default reply.
    /// Runs offline, so it is a plain [Fact].
    /// </summary>
    [Fact]
    public async Task AssistantAgentDefaultReplyTestAsync()
    {
        var assistantAgent = new AssistantAgent(
            llmConfig: null,
            name: "assistant",
            defaultReply: "hello world");

        var reply = await assistantAgent.SendAsync("hi");

        reply.GetContent().Should().Be("hello world");
        reply.GetRole().Should().Be(Role.Assistant);
        reply.From.Should().Be(assistantAgent.Name);
    }

    /// <summary>
    /// An AssistantAgent with both a function contract and a function map should
    /// execute the tool call itself and return the executed result.
    /// </summary>
    [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT")]
    public async Task AssistantAgentFunctionCallSelfExecutionTestAsync()
    {
        var config = this.CreateAzureOpenAIGPT35TurboConfig();
        var llmConfig = new ConversableAgentConfig
        {
            FunctionContracts = new[]
            {
                this.EchoAsyncFunctionContract,
            },
            ConfigList = new[]
            {
                config,
            },
        };
        var assistantAgent = new AssistantAgent(
            name: "assistant",
            llmConfig: llmConfig,
            functionMap: new Dictionary<string, Func<string, Task<string>>>
            {
                { nameof(EchoAsync), this.EchoAsyncWrapper },
            });

        await EchoFunctionCallExecutionTestAsync(assistantAgent);
        await UpperCaseTest(assistantAgent);
    }

    /// <summary>
    /// A GPTAgent with function + functionMap should execute the tool call itself,
    /// in both streaming and non-streaming mode.
    /// </summary>
    [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT")]
    public async Task GPTAgentFunctionCallSelfExecutionTestAsync()
    {
        var config = this.CreateAzureOpenAIGPT35TurboConfig();
        var agent = new GPTAgent(
            name: "gpt",
            systemMessage: "You are a helpful AI assistant",
            config: config,
            temperature: 0,
            functions: new[] { this.EchoAsyncFunction },
            functionMap: new Dictionary<string, Func<string, Task<string>>>
            {
                { nameof(EchoAsync), this.EchoAsyncWrapper },
            });

        await EchoFunctionCallExecutionStreamingTestAsync(agent);
        await EchoFunctionCallExecutionTestAsync(agent);
        await UpperCaseTest(agent);
    }

    /// <summary>
    /// echo when asked.
    /// </summary>
    /// <param name="message">message to echo</param>
    [FunctionAttribute]
    public Task<string> EchoAsync(string message)
    {
        // No awaits in the body: return a completed task instead of marking the
        // method `async`, which would trigger the CS1998 warning.
        return Task.FromResult($"[ECHO] {message}");
    }

    /// <summary>
    /// return the label name with highest inference cost
    /// </summary>
    /// <param name="labelName">label name</param>
    /// <param name="color">label color</param>
    /// <returns></returns>
    [FunctionAttribute]
    public Task<string> GetHighestLabel(string labelName, string color)
    {
        // Completed task rather than `async` without `await` (CS1998).
        return Task.FromResult($"[HIGHEST_LABEL] {labelName} {color}");
    }

    /// <summary>
    /// Asserts that <paramref name="agent"/> replies to an echo request with an
    /// unexecuted tool call targeting <see cref="EchoAsync"/>.
    /// </summary>
    public async Task EchoFunctionCallTestAsync(IAgent agent)
    {
        var message = new TextMessage(Role.System, "You are a helpful AI assistant that call echo function");
        var helloWorld = new TextMessage(Role.User, "echo Hello world");

        var reply = await agent.SendAsync(chatHistory: new[] { message, helloWorld });

        reply.From.Should().Be(agent.Name);
        reply.GetToolCalls()!.First().FunctionName.Should().Be(nameof(EchoAsync));
    }

    /// <summary>
    /// Asserts that <paramref name="agent"/> executes the echo tool call itself
    /// and returns the tool-call/result pair as an aggregate message.
    /// </summary>
    public async Task EchoFunctionCallExecutionTestAsync(IAgent agent)
    {
        var message = new TextMessage(Role.System, "You are a helpful AI assistant that echo whatever user says");
        var helloWorld = new TextMessage(Role.User, "echo Hello world");

        var reply = await agent.SendAsync(chatHistory: new[] { message, helloWorld });

        reply.GetContent().Should().Be("[ECHO] Hello world");
        reply.From.Should().Be(agent.Name);
        reply.Should().BeOfType<AggregateMessage<ToolCallMessage, ToolCallResultMessage>>();
    }

    /// <summary>
    /// Streaming variant of <see cref="EchoFunctionCallExecutionTestAsync"/>:
    /// consumes the reply stream and asserts that the final chunk is an aggregate
    /// tool-call message whose executed result is the expected echo.
    /// </summary>
    public async Task EchoFunctionCallExecutionStreamingTestAsync(IStreamingAgent agent)
    {
        var message = new TextMessage(Role.System, "You are a helpful AI assistant that echo whatever user says");
        var helloWorld = new TextMessage(Role.User, "echo Hello world");
        var option = new GenerateReplyOptions
        {
            Temperature = 0,
        };
        var replyStream = await agent.GenerateStreamingReplyAsync(messages: new[] { message, helloWorld }, option);
        var answer = "[ECHO] Hello world";

        // Drain the stream; only the last chunk is inspected below.
        IStreamingMessage? finalReply = default;
        await foreach (var reply in replyStream)
        {
            reply.From.Should().Be(agent.Name);
            finalReply = reply;
        }

        if (finalReply is AggregateMessage<ToolCallMessage, ToolCallResultMessage> aggregateMessage)
        {
            var toolCallResultMessage = aggregateMessage.Message2;
            toolCallResultMessage.ToolCalls.First().Result.Should().Be(answer);
            toolCallResultMessage.From.Should().Be(agent.Name);
            toolCallResultMessage.ToolCalls.First().FunctionName.Should().Be(nameof(EchoAsync));
        }
        else
        {
            throw new Exception("unexpected message type");
        }
    }

    /// <summary>
    /// Asserts that <paramref name="agent"/> upper-cases the user message.
    /// NOTE(review): public helper without an Async suffix; renaming would break
    /// callers in the other partial part, so the name is kept.
    /// </summary>
    public async Task UpperCaseTest(IAgent agent)
    {
        var message = new TextMessage(Role.System, "You are a helpful AI assistant that convert user message to upper case");
        var uppCaseMessage = new TextMessage(Role.User, "abcdefg");

        var reply = await agent.SendAsync(chatHistory: new[] { message, uppCaseMessage });

        reply.GetContent().Should().Contain("ABCDEFG");
        reply.From.Should().Be(agent.Name);
    }

    /// <summary>
    /// Streaming variant of <see cref="UpperCaseTest"/>: accumulates
    /// TextMessageUpdate chunks into a single TextMessage (or accepts a complete
    /// TextMessage chunk) and asserts the final content, role and sender.
    /// </summary>
    public async Task UpperCaseStreamingTestAsync(IStreamingAgent agent)
    {
        var message = new TextMessage(Role.System, "You are a helpful AI assistant that convert user message to upper case");
        var helloWorld = new TextMessage(Role.User, "a b c d e f g h i j k l m n");
        var option = new GenerateReplyOptions
        {
            Temperature = 0,
        };
        var replyStream = await agent.GenerateStreamingReplyAsync(messages: new[] { message, helloWorld }, option);
        var answer = "A B C D E F G H I J K L M N";

        TextMessage? finalReply = default;
        await foreach (var reply in replyStream)
        {
            if (reply is TextMessageUpdate update)
            {
                update.From.Should().Be(agent.Name);

                // First delta seeds the message; later deltas are appended.
                if (finalReply is null)
                {
                    finalReply = new TextMessage(update);
                }
                else
                {
                    finalReply.Update(update);
                }

                continue;
            }
            else if (reply is TextMessage textMessage)
            {
                finalReply = textMessage;
                continue;
            }

            throw new Exception("unexpected message type");
        }

        finalReply!.Content.Should().Contain(answer);
        finalReply!.Role.Should().Be(Role.Assistant);
        finalReply!.From.Should().Be(agent.Name);
    }
}
}