// Copyright (c) Microsoft Corporation. All rights reserved.
// Tool_Call_With_Ollama_And_LiteLLM.cs

using System.ClientModel;
using AutoGen.Core;
using AutoGen.OpenAI.Extension;
using OpenAI;

namespace AutoGen.OpenAI.Sample;

#region Function
public partial class Function
{
    [Function]
    public async Task<string> GetWeatherAsync(string city)
    {
        return await Task.FromResult("The weather in " + city + " is 72 degrees and sunny.");
    }
}
#endregion Function
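
// The [Function] attribute above is consumed by the AutoGen source generator (which this sample
// project is assumed to reference). It extends the partial class with the two members used further
// down; a rough sketch of their shape, not the literal generated code:
//
//   public FunctionContract GetWeatherAsyncFunctionContract { get; }   // name, description and parameter schema
//   public Task<string> GetWeatherAsyncWrapper(string argumentsJson);  // parses the JSON arguments, then calls GetWeatherAsync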

public class Tool_Call_With_Ollama_And_LiteLLM
{
    public static async Task RunAsync()
    {
        // Before running this code, make sure you have:
        // - Ollama:
        //   - dolphincoder:latest installed in Ollama
        //   - Ollama running on http://localhost:11434
        // - LiteLLM:
        //   - LiteLLM installed
        //   - LiteLLM started with the following command:
        //     litellm --model ollama_chat/dolphincoder --port 4000
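        //
        // A minimal command sketch for the steps above (illustrative, assuming the Ollama CLI and pip
        // are already installed):
        //   ollama pull dolphincoder:latest
        //   pip install 'litellm[proxy]'
        //   litellm --model ollama_chat/dolphincoder --port 4000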

        #region Create_tools
        var functions = new Function();
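
        // FunctionCallMiddleware plays two roles: `functions` advertises the tool contract to the model
        // on every request, and `functionMap` lets the middleware execute the matching local method (via
        // the generated wrapper) whenever the model replies with a tool call, returning the result as the
        // agent's reply.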
        var functionMiddleware = new FunctionCallMiddleware(
            functions: [functions.GetWeatherAsyncFunctionContract],
            functionMap: new Dictionary<string, Func<string, Task<string>>>
            {
                { functions.GetWeatherAsyncFunctionContract.Name!, functions.GetWeatherAsyncWrapper },
            });
        #endregion Create_tools

        #region Create_Agent
        // An API key is not required by the local LiteLLM server, so any placeholder string works here.
        var liteLLMUrl = "http://localhost:4000";
        var openAIClient = new OpenAIClient(new ApiKeyCredential("api-key"), new OpenAIClientOptions
        {
            Endpoint = new Uri(liteLLMUrl),
        });
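
        // The chained registrations below wire up tool calling end to end: RegisterMessageConnector
        // converts between AutoGen message types and the OpenAI chat format, RegisterMiddleware plugs in
        // the function-call middleware created above, and RegisterPrintMessage echoes every reply to the
        // console.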
        var agent = new OpenAIChatAgent(
            chatClient: openAIClient.GetChatClient("dolphincoder:latest"),
            name: "assistant",
            systemMessage: "You are a helpful AI assistant")
            .RegisterMessageConnector()
            .RegisterMiddleware(functionMiddleware)
            .RegisterPrintMessage();
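
        // Expected flow, assuming the model decides to use the tool: it replies with a tool call for
        // GetWeatherAsync (arguments like { "city": "new york" }), the middleware runs the local method,
        // and the printed reply contains the hard-coded weather string.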
        var reply = await agent.SendAsync("what's the weather in new york");
        #endregion Create_Agent
    }
}
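
// A minimal way to invoke this sample from a console entry point (hypothetical Program.cs, not part
// of this file):
//
//   await AutoGen.OpenAI.Sample.Tool_Call_With_Ollama_And_LiteLLM.RunAsync();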