mirror of https://github.com/microsoft/autogen.git
synced 2025-08-02 22:02:21 +00:00
81 lines · 2.9 KiB · C#
// Copyright (c) Microsoft Corporation. All rights reserved.
// LMStudioAgent.cs

using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using AutoGen.OpenAI;
using Azure.AI.OpenAI;
using Azure.Core;
using Azure.Core.Pipeline;

namespace AutoGen.LMStudio;

/// <summary>
/// agent that consumes local server from LM Studio
/// </summary>
/// <example>
/// [!code-csharp[LMStudioAgent](../../sample/AutoGen.BasicSamples/Example08_LMStudio.cs?name=lmstudio_example_1)]
/// </example>
public class LMStudioAgent : IAgent
{
    // All work is delegated to a GPTAgent configured with a patched OpenAIClient.
    private readonly GPTAgent innerAgent;

    /// <summary>
    /// Create an agent that talks to a local LM Studio server.
    /// </summary>
    /// <param name="name">agent name.</param>
    /// <param name="config">endpoint configuration (host/port) of the local LM Studio server.</param>
    /// <param name="systemMessage">system message sent to the model.</param>
    /// <param name="temperature">sampling temperature.</param>
    /// <param name="maxTokens">maximum number of tokens to generate.</param>
    /// <param name="functions">optional function definitions exposed to the model.</param>
    /// <param name="functionMap">optional map from function name to the delegate that executes it.</param>
    /// <exception cref="ArgumentNullException"><paramref name="config"/> is null.</exception>
    public LMStudioAgent(
        string name,
        LMStudioConfig config,
        string systemMessage = "You are a helpful AI assistant",
        float temperature = 0.7f,
        int maxTokens = 1024,
        IEnumerable<FunctionDefinition>? functions = null,
        IDictionary<string, Func<string, Task<string>>>? functionMap = null)
    {
        if (config is null)
        {
            throw new ArgumentNullException(nameof(config));
        }

        var client = ConfigOpenAIClientForLMStudio(config);
        innerAgent = new GPTAgent(
            name: name,
            systemMessage: systemMessage,
            openAIClient: client,
            modelName: "llm", // model name doesn't matter for LM Studio
            temperature: temperature,
            maxTokens: maxTokens,
            functions: functions,
            functionMap: functionMap);
    }

    public string Name => innerAgent.Name;

    /// <inheritdoc cref="IAgent.GenerateReplyAsync"/>
    public Task<IMessage> GenerateReplyAsync(
        IEnumerable<IMessage> messages,
        GenerateReplyOptions? options = null,
        System.Threading.CancellationToken cancellationToken = default)
    {
        return innerAgent.GenerateReplyAsync(messages, options, cancellationToken);
    }

    /// <summary>
    /// Build an <see cref="OpenAIClient"/> pointed at the LM Studio endpoint.
    /// LM Studio needs no authentication, so the client's default pipeline (which adds an
    /// Authorization header) is swapped out via reflection on the SDK's private fields.
    /// </summary>
    /// <exception cref="InvalidOperationException">
    /// the Azure.AI.OpenAI SDK's private fields were renamed and can no longer be patched.
    /// </exception>
    private OpenAIClient ConfigOpenAIClientForLMStudio(LMStudioConfig config)
    {
        // create uri from host and port
        var uri = config.Uri;
        // Dummy credential: the client constructor requires one, but LM Studio ignores it.
        // Use UtcNow for the machine-consumed expiry timestamp.
        var accessToken = new AccessToken(string.Empty, DateTimeOffset.UtcNow.AddDays(180));
        var tokenCredential = DelegatedTokenCredential.Create((_, _) => accessToken);
        var openAIClient = new OpenAIClient(uri, tokenCredential);

        // remove authentication header from pipeline
        var pipeline = HttpPipelineBuilder.Build(
            new OpenAIClientOptions(OpenAIClientOptions.ServiceVersion.V2022_12_01),
            Array.Empty<HttpPipelinePolicy>(),
            Array.Empty<HttpPipelinePolicy>(),
            new ResponseClassifier());

        // use reflection to override _pipeline field; fail loudly (instead of NRE) if the SDK internals changed
        var pipelineField = typeof(OpenAIClient).GetField("_pipeline", System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance)
            ?? throw new InvalidOperationException("OpenAIClient no longer has a private '_pipeline' field; update LMStudioAgent for the current Azure.AI.OpenAI SDK.");
        pipelineField.SetValue(openAIClient, pipeline);

        // use reflection to set _isConfiguredForAzureOpenAI to false
        var isConfiguredForAzureOpenAIField = typeof(OpenAIClient).GetField("_isConfiguredForAzureOpenAI", System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance)
            ?? throw new InvalidOperationException("OpenAIClient no longer has a private '_isConfiguredForAzureOpenAI' field; update LMStudioAgent for the current Azure.AI.OpenAI SDK.");
        isConfiguredForAzureOpenAIField.SetValue(openAIClient, false);

        return openAIClient;
    }
}
|