// Copyright (c) Microsoft Corporation. All rights reserved.
// LMStudioAgent.cs

using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using AutoGen.OpenAI;
using Azure.AI.OpenAI;
using Azure.Core;
using Azure.Core.Pipeline;

namespace AutoGen.LMStudio;

/// <summary>
/// Agent that consumes the local OpenAI-compatible server hosted by LM Studio.
/// All replies are delegated to an inner <see cref="GPTAgent"/> whose
/// <see cref="OpenAIClient"/> has been re-pointed at the LM Studio endpoint.
/// </summary>
/// <example>
/// [!code-csharp[LMStudioAgent](../../sample/AutoGen.BasicSamples/Example08_LMStudio.cs?name=lmstudio_example_1)]
/// </example>
public class LMStudioAgent : IAgent
{
    private readonly GPTAgent innerAgent;

    /// <summary>
    /// Creates an agent that talks to the LM Studio server described by <paramref name="config"/>.
    /// </summary>
    /// <param name="name">Agent name.</param>
    /// <param name="config">Host/port configuration of the local LM Studio server.</param>
    /// <param name="systemMessage">System prompt sent with every request.</param>
    /// <param name="temperature">Sampling temperature forwarded to the model.</param>
    /// <param name="maxTokens">Maximum tokens to generate per reply.</param>
    /// <param name="functions">Optional function definitions advertised to the model.</param>
    /// <param name="functionMap">Optional map from function name to its executor.</param>
    public LMStudioAgent(
        string name,
        LMStudioConfig config,
        string systemMessage = "You are a helpful AI assistant",
        float temperature = 0.7f,
        int maxTokens = 1024,
        IEnumerable<FunctionDefinition>? functions = null,
        IDictionary<string, Func<string, Task<string>>>? functionMap = null)
    {
        var client = ConfigOpenAIClientForLMStudio(config);
        innerAgent = new GPTAgent(
            name: name,
            systemMessage: systemMessage,
            openAIClient: client,
            modelName: "llm", // model name doesn't matter for LM Studio
            temperature: temperature,
            maxTokens: maxTokens,
            functions: functions,
            functionMap: functionMap);
    }

    public string Name => innerAgent.Name;

    /// <inheritdoc/>
    public Task<IMessage> GenerateReplyAsync(
        IEnumerable<IMessage> messages,
        GenerateReplyOptions? options = null,
        System.Threading.CancellationToken cancellationToken = default)
    {
        return innerAgent.GenerateReplyAsync(messages, options, cancellationToken);
    }

    /// <summary>
    /// Builds an <see cref="OpenAIClient"/> that targets the LM Studio endpoint
    /// instead of Azure OpenAI. LM Studio requires no authentication, so the
    /// client's pipeline is rebuilt without the auth policy via reflection.
    /// </summary>
    /// <exception cref="InvalidOperationException">
    /// Thrown when the Azure.AI.OpenAI SDK internals this method patches
    /// (<c>_pipeline</c>, <c>_isConfiguredForAzureOpenAI</c>) cannot be found,
    /// e.g. after an SDK upgrade renames them.
    /// </exception>
    private OpenAIClient ConfigOpenAIClientForLMStudio(LMStudioConfig config)
    {
        // create uri from host and port
        var uri = config.Uri;

        // LM Studio ignores credentials; supply a long-lived dummy token only to
        // satisfy the OpenAIClient constructor.
        var accessToken = new AccessToken(string.Empty, DateTimeOffset.Now.AddDays(180));
        var tokenCredential = DelegatedTokenCredential.Create((_, _) => accessToken);
        var openAIClient = new OpenAIClient(uri, tokenCredential);

        // Build a replacement pipeline with no per-call/per-retry policies, which
        // removes the authentication header policy the client installed.
        var pipeline = HttpPipelineBuilder.Build(
            new OpenAIClientOptions(OpenAIClientOptions.ServiceVersion.V2022_12_01),
            Array.Empty<HttpPipelinePolicy>(),
            Array.Empty<HttpPipelinePolicy>(),
            new ResponseClassifier());

        // Use reflection to override the private _pipeline field; fail loudly if the
        // SDK internals change rather than throwing a NullReferenceException later.
        var pipelineField = typeof(OpenAIClient).GetField(
            "_pipeline",
            System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance)
            ?? throw new InvalidOperationException(
                "Unable to locate OpenAIClient._pipeline; the Azure.AI.OpenAI SDK internals may have changed.");
        pipelineField.SetValue(openAIClient, pipeline);

        // Use reflection to set _isConfiguredForAzureOpenAI to false so the client
        // issues plain OpenAI-style requests, which is what LM Studio expects.
        var isConfiguredForAzureOpenAIField = typeof(OpenAIClient).GetField(
            "_isConfiguredForAzureOpenAI",
            System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance)
            ?? throw new InvalidOperationException(
                "Unable to locate OpenAIClient._isConfiguredForAzureOpenAI; the Azure.AI.OpenAI SDK internals may have changed.");
        isConfiguredForAzureOpenAIField.SetValue(openAIClient, false);

        return openAIClient;
    }
}