diff --git a/dotnet/samples/Concepts/Agents/AzureAIAgent_FileManipulation.cs b/dotnet/samples/Concepts/Agents/AzureAIAgent_FileManipulation.cs
index 0f2f9b5465ca..cf55801420df 100644
--- a/dotnet/samples/Concepts/Agents/AzureAIAgent_FileManipulation.cs
+++ b/dotnet/samples/Concepts/Agents/AzureAIAgent_FileManipulation.cs
@@ -1,5 +1,4 @@
// Copyright (c) Microsoft. All rights reserved.
-using System.Diagnostics;
using Azure.AI.Projects;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents;
@@ -13,19 +12,16 @@ namespace Agents;
///
/// Demonstrate using code-interpreter to manipulate and generate csv files with .
///
-public class AzureAIAgent_FileManipulation(ITestOutputHelper output) : BaseAgentsTest(output)
+public class AzureAIAgent_FileManipulation(ITestOutputHelper output) : BaseAzureAgentTest(output)
{
[Fact]
public async Task AnalyzeCSVFileUsingAzureAIAgentAsync()
{
- AzureAIClientProvider clientProvider = this.GetAzureProvider();
- AgentsClient client = clientProvider.Client.GetAgentsClient();
-
await using Stream stream = EmbeddedResource.ReadStream("sales.csv")!;
- AgentFile fileInfo = await client.UploadFileAsync(stream, AgentFilePurpose.Agents, "sales.csv");
+ AgentFile fileInfo = await this.AgentsClient.UploadFileAsync(stream, AgentFilePurpose.Agents, "sales.csv");
// Define the agent
- Agent definition = await client.CreateAgentAsync(
+ Agent definition = await this.AgentsClient.CreateAgentAsync(
TestConfiguration.AzureAI.ChatModelId,
tools: [new CodeInterpreterToolDefinition()],
toolResources:
@@ -36,7 +32,7 @@ public async Task AnalyzeCSVFileUsingAzureAIAgentAsync()
FileIds = { fileInfo.Id },
}
});
- AzureAIAgent agent = new(definition, clientProvider);
+ AzureAIAgent agent = new(definition, this.AgentsClient);
// Create a chat for agent interaction.
AgentGroupChat chat = new();
@@ -50,8 +46,8 @@ public async Task AnalyzeCSVFileUsingAzureAIAgentAsync()
}
finally
{
- await client.DeleteAgentAsync(agent.Id);
- await client.DeleteFileAsync(fileInfo.Id);
+ await this.AgentsClient.DeleteAgentAsync(agent.Id);
+ await this.AgentsClient.DeleteFileAsync(fileInfo.Id);
await chat.ResetAsync();
}
@@ -65,45 +61,7 @@ async Task InvokeAgentAsync(string input)
await foreach (ChatMessageContent response in chat.InvokeAsync(agent))
{
this.WriteAgentChatMessage(response);
- await this.DownloadContentAsync(client, response);
- }
- }
- }
-
- private async Task DownloadContentAsync(AgentsClient client, ChatMessageContent message)
- {
- foreach (KernelContent item in message.Items)
- {
- if (item is AnnotationContent annotation)
- {
- await this.DownloadFileAsync(client, annotation.FileId!);
- }
- }
- }
-
- private async Task DownloadFileAsync(AgentsClient client, string fileId, bool launchViewer = false)
- {
- AgentFile fileInfo = client.GetFile(fileId);
- if (fileInfo.Purpose == AgentFilePurpose.AgentsOutput)
- {
- string filePath = Path.Combine(Path.GetTempPath(), Path.GetFileName(fileInfo.Filename));
- if (launchViewer)
- {
- filePath = Path.ChangeExtension(filePath, ".png");
- }
-
- BinaryData content = await client.GetFileContentAsync(fileId);
- File.WriteAllBytes(filePath, content.ToArray());
- Console.WriteLine($" File #{fileId} saved to: {filePath}");
-
- if (launchViewer)
- {
- Process.Start(
- new ProcessStartInfo
- {
- FileName = "cmd.exe",
- Arguments = $"/C start {filePath}"
- });
+ await this.DownloadContentAsync(response);
}
}
}
diff --git a/dotnet/samples/Concepts/Agents/AzureAIAgent_Streaming.cs b/dotnet/samples/Concepts/Agents/AzureAIAgent_Streaming.cs
index 731c4b466ae8..de2fc685a357 100644
--- a/dotnet/samples/Concepts/Agents/AzureAIAgent_Streaming.cs
+++ b/dotnet/samples/Concepts/Agents/AzureAIAgent_Streaming.cs
@@ -11,7 +11,7 @@ namespace Agents;
///
/// Demonstrate consuming "streaming" message for .
///
-public class AzureAIAgent_Streaming(ITestOutputHelper output) : BaseAgentsTest(output)
+public class AzureAIAgent_Streaming(ITestOutputHelper output) : BaseAzureAgentTest(output)
{
[Fact]
public async Task UseStreamingAgentAsync()
@@ -20,17 +20,15 @@ public async Task UseStreamingAgentAsync()
const string AgentInstructions = "Repeat the user message in the voice of a pirate and then end with a parrot sound.";
// Define the agent
- AzureAIClientProvider clientProvider = this.GetAzureProvider();
- AgentsClient client = clientProvider.Client.GetAgentsClient();
- Agent definition = await client.CreateAgentAsync(
+ Agent definition = await this.AgentsClient.CreateAgentAsync(
TestConfiguration.AzureAI.ChatModelId,
AgentName,
null,
AgentInstructions);
- AzureAIAgent agent = new(definition, clientProvider);
+ AzureAIAgent agent = new(definition, this.AgentsClient);
// Create a thread for the agent conversation.
- AgentThread thread = await client.CreateThreadAsync(metadata: AssistantSampleMetadata);
+ AgentThread thread = await this.AgentsClient.CreateThreadAsync(metadata: SampleMetadata);
// Respond to user input
await InvokeAgentAsync(agent, thread.Id, "Fortune favors the bold.");
@@ -48,14 +46,12 @@ public async Task UseStreamingAssistantAgentWithPluginAsync()
const string AgentInstructions = "Answer questions about the menu.";
// Define the agent
- AzureAIClientProvider clientProvider = this.GetAzureProvider();
- AgentsClient client = clientProvider.Client.GetAgentsClient();
- Agent definition = await client.CreateAgentAsync(
+ Agent definition = await this.AgentsClient.CreateAgentAsync(
TestConfiguration.AzureAI.ChatModelId,
AgentName,
null,
AgentInstructions);
- AzureAIAgent agent = new(definition, clientProvider)
+ AzureAIAgent agent = new(definition, this.AgentsClient)
{
Kernel = new Kernel(),
};
@@ -65,7 +61,7 @@ public async Task UseStreamingAssistantAgentWithPluginAsync()
agent.Kernel.Plugins.Add(plugin);
// Create a thread for the agent conversation.
- AgentThread thread = await client.CreateThreadAsync(metadata: AssistantSampleMetadata);
+ AgentThread thread = await this.AgentsClient.CreateThreadAsync(metadata: SampleMetadata);
// Respond to user input
await InvokeAgentAsync(agent, thread.Id, "What is the special soup and its price?");
@@ -82,21 +78,19 @@ public async Task UseStreamingAssistantWithCodeInterpreterAsync()
const string AgentInstructions = "Solve math problems with code.";
// Define the agent
- AzureAIClientProvider clientProvider = this.GetAzureProvider();
- AgentsClient client = clientProvider.Client.GetAgentsClient();
- Agent definition = await client.CreateAgentAsync(
+ Agent definition = await this.AgentsClient.CreateAgentAsync(
TestConfiguration.AzureAI.ChatModelId,
AgentName,
null,
AgentInstructions,
[new CodeInterpreterToolDefinition()]);
- AzureAIAgent agent = new(definition, clientProvider)
+ AzureAIAgent agent = new(definition, this.AgentsClient)
{
Kernel = new Kernel(),
};
// Create a thread for the agent conversation.
- AgentThread thread = await client.CreateThreadAsync(metadata: AssistantSampleMetadata);
+ AgentThread thread = await this.AgentsClient.CreateThreadAsync(metadata: SampleMetadata);
// Respond to user input
await InvokeAgentAsync(agent, thread.Id, "Is 191 a prime number?");
diff --git a/dotnet/samples/Concepts/Agents/DeclarativeAgents.cs b/dotnet/samples/Concepts/Agents/DeclarativeAgents.cs
index c2a3fd377071..a8e98f2e107e 100644
--- a/dotnet/samples/Concepts/Agents/DeclarativeAgents.cs
+++ b/dotnet/samples/Concepts/Agents/DeclarativeAgents.cs
@@ -1,5 +1,4 @@
// Copyright (c) Microsoft. All rights reserved.
-using System.Text;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents;
using Microsoft.SemanticKernel.ChatCompletion;
@@ -9,11 +8,13 @@ namespace Agents;
public class DeclarativeAgents(ITestOutputHelper output) : BaseAgentsTest(output)
{
- [InlineData("SchedulingAssistant.json", "Read the body of my last five emails, if any contain a meeting request for today, check that it's already on my calendar, if not, call out which email it is.")]
+ [InlineData(
+ "SchedulingAssistant.json",
+ "Read the body of my last five emails, if any contain a meeting request for today, check that it's already on my calendar, if not, call out which email it is.")]
[Theory]
public async Task LoadsAgentFromDeclarativeAgentManifestAsync(string agentFileName, string input)
{
- var kernel = CreateKernel();
+ var kernel = this.CreateKernelWithChatCompletion();
kernel.AutoFunctionInvocationFilters.Add(new ExpectedSchemaFunctionFilter());
var manifestLookupDirectory = Path.Combine(Directory.GetCurrentDirectory(), "..", "..", "..", "Resources", "DeclarativeAgents");
var manifestFilePath = Path.Combine(manifestLookupDirectory, agentFileName);
@@ -30,9 +31,8 @@ public async Task LoadsAgentFromDeclarativeAgentManifestAsync(string agentFileNa
Assert.NotNull(agent.Instructions);
Assert.NotEmpty(agent.Instructions);
- ChatMessageContent message = new(AuthorRole.User, input);
- ChatHistory chatHistory = [message];
- StringBuilder sb = new();
+ ChatHistory chatHistory = [new ChatMessageContent(AuthorRole.User, input)];
+
var kernelArguments = new KernelArguments(new PromptExecutionSettings
{
FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(
@@ -42,23 +42,14 @@ public async Task LoadsAgentFromDeclarativeAgentManifestAsync(string agentFileNa
}
)
});
- await foreach (ChatMessageContent response in agent.InvokeAsync(chatHistory, kernelArguments))
- {
- chatHistory.Add(response);
- sb.Append(response.Content);
- }
- Assert.NotEmpty(chatHistory.Skip(1));
- }
- private Kernel CreateKernel()
- {
- IKernelBuilder builder = Kernel.CreateBuilder();
-
- base.AddChatCompletionToKernel(builder);
- return builder.Build();
+ var responses = await agent.InvokeAsync(chatHistory, kernelArguments).ToArrayAsync();
+ Assert.NotEmpty(responses);
}
+
private sealed class ExpectedSchemaFunctionFilter : IAutoFunctionInvocationFilter
- {//TODO: this eventually needs to be added to all CAP or DA but we're still discussing where should those facilitators live
+ {
+ // TODO: This eventually needs to be added to all CAP or DA, but we're still discussing where those facilitators should live.
public async Task OnAutoFunctionInvocationAsync(AutoFunctionInvocationContext context, Func next)
{
await next(context);
diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Agents.cs b/dotnet/samples/Concepts/Agents/MixedChat_Agents.cs
index 159441147f77..0895308f0215 100644
--- a/dotnet/samples/Concepts/Agents/MixedChat_Agents.cs
+++ b/dotnet/samples/Concepts/Agents/MixedChat_Agents.cs
@@ -4,13 +4,14 @@
using Microsoft.SemanticKernel.Agents.Chat;
using Microsoft.SemanticKernel.Agents.OpenAI;
using Microsoft.SemanticKernel.ChatCompletion;
+using OpenAI.Assistants;
namespace Agents;
///
/// Demonstrate that two different agent types are able to participate in the same conversation.
/// In this case a and participate.
///
-public class MixedChat_Agents(ITestOutputHelper output) : BaseAgentsTest(output)
+public class MixedChat_Agents(ITestOutputHelper output) : BaseAssistantTest(output)
{
private const string ReviewerName = "ArtDirector";
private const string ReviewerInstructions =
@@ -44,16 +45,16 @@ public async Task ChatWithOpenAIAssistantAgentAndChatCompletionAgentAsync()
Kernel = this.CreateKernelWithChatCompletion(),
};
- OpenAIAssistantAgent agentWriter =
- await OpenAIAssistantAgent.CreateAsync(
- clientProvider: this.GetClientProvider(),
- definition: new OpenAIAssistantDefinition(this.Model)
- {
- Instructions = CopyWriterInstructions,
- Name = CopyWriterName,
- Metadata = AssistantSampleMetadata,
- },
- kernel: new Kernel());
+ // Define the assistant
+ Assistant assistant =
+ await this.AssistantClient.CreateAssistantAsync(
+ this.Model,
+ name: CopyWriterName,
+ instructions: CopyWriterInstructions,
+ metadata: SampleMetadata);
+
+ // Create the agent
+ OpenAIAssistantAgent agentWriter = new(assistant, this.AssistantClient);
// Create a chat for agent interaction.
AgentGroupChat chat =
diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Files.cs b/dotnet/samples/Concepts/Agents/MixedChat_Files.cs
index 4f12657e0d7a..56ff0f331f0b 100644
--- a/dotnet/samples/Concepts/Agents/MixedChat_Files.cs
+++ b/dotnet/samples/Concepts/Agents/MixedChat_Files.cs
@@ -3,7 +3,7 @@
using Microsoft.SemanticKernel.Agents;
using Microsoft.SemanticKernel.Agents.OpenAI;
using Microsoft.SemanticKernel.ChatCompletion;
-using OpenAI.Files;
+using OpenAI.Assistants;
using Resources;
namespace Agents;
@@ -12,36 +12,27 @@ namespace Agents;
/// Demonstrate agent interacts with
/// when it produces file output.
///
-public class MixedChat_Files(ITestOutputHelper output) : BaseAgentsTest(output)
+public class MixedChat_Files(ITestOutputHelper output) : BaseAssistantTest(output)
{
private const string SummaryInstructions = "Summarize the entire conversation for the user in natural language.";
[Fact]
public async Task AnalyzeFileAndGenerateReportAsync()
{
- OpenAIClientProvider provider = this.GetClientProvider();
-
- OpenAIFileClient fileClient = provider.Client.GetOpenAIFileClient();
-
- OpenAIFile uploadFile =
- await fileClient.UploadFileAsync(
- new BinaryData(await EmbeddedResource.ReadAllAsync("30-user-context.txt")),
- "30-user-context.txt",
- FileUploadPurpose.Assistants);
-
- Console.WriteLine(this.ApiKey);
+ await using Stream stream = EmbeddedResource.ReadStream("30-user-context.txt")!;
+ string fileId = await this.Client.UploadAssistantFileAsync(stream, "30-user-context.txt");
// Define the agents
- OpenAIAssistantAgent analystAgent =
- await OpenAIAssistantAgent.CreateAsync(
- provider,
- definition: new OpenAIAssistantDefinition(this.Model)
- {
- EnableCodeInterpreter = true,
- CodeInterpreterFileIds = [uploadFile.Id], // Associate uploaded file with assistant code-interpreter
- Metadata = AssistantSampleMetadata,
- },
- kernel: new Kernel());
+ // Define the assistant
+ Assistant assistant =
+ await this.AssistantClient.CreateAssistantAsync(
+ this.Model,
+ enableCodeInterpreter: true,
+ codeInterpreterFileIds: [fileId],
+ metadata: SampleMetadata);
+
+ // Create the agent
+ OpenAIAssistantAgent analystAgent = new(assistant, this.AssistantClient);
ChatCompletionAgent summaryAgent =
new()
@@ -66,8 +57,8 @@ Create a tab delimited file report of the ordered (descending) frequency distrib
}
finally
{
- await analystAgent.DeleteAsync();
- await fileClient.DeleteFileAsync(uploadFile.Id);
+ await this.AssistantClient.DeleteAssistantAsync(analystAgent.Id);
+ await this.Client.DeleteFileAsync(fileId);
}
// Local function to invoke agent and display the conversation messages.
@@ -83,7 +74,7 @@ async Task InvokeAgentAsync(Agent agent, string? input = null)
await foreach (ChatMessageContent response in chat.InvokeAsync(agent))
{
this.WriteAgentChatMessage(response);
- await this.DownloadResponseContentAsync(fileClient, response);
+ await this.DownloadResponseContentAsync(response);
}
}
}
diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Images.cs b/dotnet/samples/Concepts/Agents/MixedChat_Images.cs
index 03f047c756bd..158da60e418a 100644
--- a/dotnet/samples/Concepts/Agents/MixedChat_Images.cs
+++ b/dotnet/samples/Concepts/Agents/MixedChat_Images.cs
@@ -3,7 +3,7 @@
using Microsoft.SemanticKernel.Agents;
using Microsoft.SemanticKernel.Agents.OpenAI;
using Microsoft.SemanticKernel.ChatCompletion;
-using OpenAI.Files;
+using OpenAI.Assistants;
namespace Agents;
@@ -11,7 +11,7 @@ namespace Agents;
/// Demonstrate agent interacts with
/// when it produces image output.
///
-public class MixedChat_Images(ITestOutputHelper output) : BaseAgentsTest(output)
+public class MixedChat_Images(ITestOutputHelper output) : BaseAssistantTest(output)
{
private const string AnalystName = "Analyst";
private const string AnalystInstructions = "Create charts as requested without explanation.";
@@ -22,22 +22,17 @@ public class MixedChat_Images(ITestOutputHelper output) : BaseAgentsTest(output)
[Fact]
public async Task AnalyzeDataAndGenerateChartAsync()
{
- OpenAIClientProvider provider = this.GetClientProvider();
+ // Define the assistant
+ Assistant assistant =
+ await this.AssistantClient.CreateAssistantAsync(
+ this.Model,
+ name: AnalystName,
+ instructions: AnalystInstructions,
+ enableCodeInterpreter: true,
+ metadata: SampleMetadata);
- OpenAIFileClient fileClient = provider.Client.GetOpenAIFileClient();
-
- // Define the agents
- OpenAIAssistantAgent analystAgent =
- await OpenAIAssistantAgent.CreateAsync(
- provider,
- definition: new OpenAIAssistantDefinition(this.Model)
- {
- Instructions = AnalystInstructions,
- Name = AnalystName,
- EnableCodeInterpreter = true,
- Metadata = AssistantSampleMetadata,
- },
- kernel: new Kernel());
+ // Create the agent
+ OpenAIAssistantAgent analystAgent = new(assistant, this.AssistantClient);
ChatCompletionAgent summaryAgent =
new()
@@ -75,7 +70,7 @@ await InvokeAgentAsync(
}
finally
{
- await analystAgent.DeleteAsync();
+ await this.AssistantClient.DeleteAssistantAsync(analystAgent.Id);
}
// Local function to invoke agent and display the conversation messages.
@@ -91,7 +86,7 @@ async Task InvokeAgentAsync(Agent agent, string? input = null)
await foreach (ChatMessageContent response in chat.InvokeAsync(agent))
{
this.WriteAgentChatMessage(response);
- await this.DownloadResponseImageAsync(fileClient, response);
+ await this.DownloadResponseImageAsync(response);
}
}
}
diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Reset.cs b/dotnet/samples/Concepts/Agents/MixedChat_Reset.cs
index 7c9a2490d3e0..431dcc982a5e 100644
--- a/dotnet/samples/Concepts/Agents/MixedChat_Reset.cs
+++ b/dotnet/samples/Concepts/Agents/MixedChat_Reset.cs
@@ -3,13 +3,14 @@
using Microsoft.SemanticKernel.Agents;
using Microsoft.SemanticKernel.Agents.OpenAI;
using Microsoft.SemanticKernel.ChatCompletion;
+using OpenAI.Assistants;
namespace Agents;
///
/// Demonstrate the use of .
///
-public class MixedChat_Reset(ITestOutputHelper output) : BaseAgentsTest(output)
+public class MixedChat_Reset(ITestOutputHelper output) : BaseAssistantTest(output)
{
private const string AgentInstructions =
"""
@@ -20,18 +21,15 @@ The user may either provide information or query on information previously provi
[Fact]
public async Task ResetChatAsync()
{
- OpenAIClientProvider provider = this.GetClientProvider();
+ // Define the assistant
+ Assistant assistant =
+ await this.AssistantClient.CreateAssistantAsync(
+ this.Model,
+ instructions: AgentInstructions,
+ metadata: SampleMetadata);
- // Define the agents
- OpenAIAssistantAgent assistantAgent =
- await OpenAIAssistantAgent.CreateAsync(
- provider,
- definition: new OpenAIAssistantDefinition(this.Model)
- {
- Name = nameof(OpenAIAssistantAgent),
- Instructions = AgentInstructions,
- },
- kernel: new Kernel());
+ // Create the agent
+ OpenAIAssistantAgent assistantAgent = new(assistant, this.AssistantClient);
ChatCompletionAgent chatAgent =
new()
@@ -64,7 +62,7 @@ await OpenAIAssistantAgent.CreateAsync(
finally
{
await chat.ResetAsync();
- await assistantAgent.DeleteAsync();
+ await this.AssistantClient.DeleteAssistantAsync(assistantAgent.Id);
}
// Local function to invoke agent and display the conversation messages.
diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Serialization.cs b/dotnet/samples/Concepts/Agents/MixedChat_Serialization.cs
index 27212e292366..4979ceedacb1 100644
--- a/dotnet/samples/Concepts/Agents/MixedChat_Serialization.cs
+++ b/dotnet/samples/Concepts/Agents/MixedChat_Serialization.cs
@@ -4,13 +4,14 @@
using Microsoft.SemanticKernel.Agents.Chat;
using Microsoft.SemanticKernel.Agents.OpenAI;
using Microsoft.SemanticKernel.ChatCompletion;
+using OpenAI.Assistants;
namespace Agents;
///
/// Demonstrate the serialization of with a
/// and an .
///
-public class MixedChat_Serialization(ITestOutputHelper output) : BaseAgentsTest(output)
+public class MixedChat_Serialization(ITestOutputHelper output) : BaseAssistantTest(output)
{
private const string TranslatorName = "Translator";
private const string TranslatorInstructions =
@@ -39,15 +40,16 @@ public async Task SerializeAndRestoreAgentGroupChatAsync()
Kernel = this.CreateKernelWithChatCompletion(),
};
- OpenAIAssistantAgent agentCounter =
- await OpenAIAssistantAgent.CreateAsync(
- kernel: new(),
- clientProvider: this.GetClientProvider(),
- definition: new(this.Model)
- {
- Instructions = CounterInstructions,
- Name = CounterName,
- });
+ // Define the assistant
+ Assistant assistant =
+ await this.AssistantClient.CreateAssistantAsync(
+ this.Model,
+ name: CounterName,
+ instructions: CounterInstructions,
+ metadata: SampleMetadata);
+
+ // Create the agent
+ OpenAIAssistantAgent agentCounter = new(assistant, this.AssistantClient);
AgentGroupChat chat = CreateGroupChat();
diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Streaming.cs b/dotnet/samples/Concepts/Agents/MixedChat_Streaming.cs
index c9364bc2b2a9..fc28c3c683dd 100644
--- a/dotnet/samples/Concepts/Agents/MixedChat_Streaming.cs
+++ b/dotnet/samples/Concepts/Agents/MixedChat_Streaming.cs
@@ -4,6 +4,7 @@
using Microsoft.SemanticKernel.Agents.Chat;
using Microsoft.SemanticKernel.Agents.OpenAI;
using Microsoft.SemanticKernel.ChatCompletion;
+using OpenAI.Assistants;
namespace Agents;
@@ -11,7 +12,7 @@ namespace Agents;
/// Demonstrate consuming "streaming" message for and
/// both participating in an .
///
-public class MixedChat_Streaming(ITestOutputHelper output) : BaseAgentsTest(output)
+public class MixedChat_Streaming(ITestOutputHelper output) : BaseAssistantTest(output)
{
private const string ReviewerName = "ArtDirector";
private const string ReviewerInstructions =
@@ -45,16 +46,16 @@ public async Task UseStreamingAgentChatAsync()
Kernel = this.CreateKernelWithChatCompletion(),
};
- OpenAIAssistantAgent agentWriter =
- await OpenAIAssistantAgent.CreateAsync(
- clientProvider: this.GetClientProvider(),
- definition: new OpenAIAssistantDefinition(this.Model)
- {
- Instructions = CopyWriterInstructions,
- Name = CopyWriterName,
- Metadata = AssistantSampleMetadata,
- },
- kernel: new Kernel());
+ // Define the assistant
+ Assistant assistant =
+ await this.AssistantClient.CreateAssistantAsync(
+ this.Model,
+ name: CopyWriterName,
+ instructions: CopyWriterInstructions,
+ metadata: SampleMetadata);
+
+ // Create the agent
+ OpenAIAssistantAgent agentWriter = new(assistant, this.AssistantClient);
// Create a chat for agent interaction.
AgentGroupChat chat =
diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_ChartMaker.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_ChartMaker.cs
index 83ea083ec674..f23e7ab952b7 100644
--- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_ChartMaker.cs
+++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_ChartMaker.cs
@@ -3,7 +3,7 @@
using Microsoft.SemanticKernel.Agents;
using Microsoft.SemanticKernel.Agents.OpenAI;
using Microsoft.SemanticKernel.ChatCompletion;
-using OpenAI.Files;
+using OpenAI.Assistants;
namespace Agents;
@@ -11,30 +11,22 @@ namespace Agents;
/// Demonstrate using code-interpreter with to
/// produce image content displays the requested charts.
///
-public class OpenAIAssistant_ChartMaker(ITestOutputHelper output) : BaseAgentsTest(output)
+public class OpenAIAssistant_ChartMaker(ITestOutputHelper output) : BaseAssistantTest(output)
{
- private const string AgentName = "ChartMaker";
- private const string AgentInstructions = "Create charts as requested without explanation.";
-
[Fact]
public async Task GenerateChartWithOpenAIAssistantAgentAsync()
{
- OpenAIClientProvider provider = this.GetClientProvider();
-
- OpenAIFileClient fileClient = provider.Client.GetOpenAIFileClient();
+ // Define the assistant
+ Assistant assistant =
+ await this.AssistantClient.CreateAssistantAsync(
+ this.Model,
+ "ChartMaker",
+ instructions: "Create charts as requested without explanation.",
+ enableCodeInterpreter: true,
+ metadata: SampleMetadata);
- // Define the agent
- OpenAIAssistantAgent agent =
- await OpenAIAssistantAgent.CreateAsync(
- provider,
- definition: new OpenAIAssistantDefinition(this.Model)
- {
- Instructions = AgentInstructions,
- Name = AgentName,
- EnableCodeInterpreter = true,
- Metadata = AssistantSampleMetadata,
- },
- kernel: new());
+ // Create the agent
+ OpenAIAssistantAgent agent = new(assistant, this.AssistantClient);
// Create a chat for agent interaction.
AgentGroupChat chat = new();
@@ -58,7 +50,7 @@ Sum 426 1622 856 2904
}
finally
{
- await agent.DeleteAsync();
+ await this.AssistantClient.DeleteAssistantAsync(agent.Id);
}
// Local function to invoke agent and display the conversation messages.
@@ -71,7 +63,7 @@ async Task InvokeAgentAsync(string input)
await foreach (ChatMessageContent response in chat.InvokeAsync(agent))
{
this.WriteAgentChatMessage(response);
- await this.DownloadResponseImageAsync(fileClient, response);
+ await this.DownloadResponseImageAsync(response);
}
}
}
diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileManipulation.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileManipulation.cs
index a0d48bf94eaa..915861ab2a99 100644
--- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileManipulation.cs
+++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileManipulation.cs
@@ -3,7 +3,7 @@
using Microsoft.SemanticKernel.Agents;
using Microsoft.SemanticKernel.Agents.OpenAI;
using Microsoft.SemanticKernel.ChatCompletion;
-using OpenAI.Files;
+using OpenAI.Assistants;
using Resources;
namespace Agents;
@@ -11,32 +11,24 @@ namespace Agents;
///
/// Demonstrate using code-interpreter to manipulate and generate csv files with .
///
-public class OpenAIAssistant_FileManipulation(ITestOutputHelper output) : BaseAgentsTest(output)
+public class OpenAIAssistant_FileManipulation(ITestOutputHelper output) : BaseAssistantTest(output)
{
[Fact]
public async Task AnalyzeCSVFileUsingOpenAIAssistantAgentAsync()
{
- OpenAIClientProvider provider = this.GetClientProvider();
+ await using Stream stream = EmbeddedResource.ReadStream("sales.csv")!;
+ string fileId = await this.Client.UploadAssistantFileAsync(stream, "sales.csv");
- OpenAIFileClient fileClient = provider.Client.GetOpenAIFileClient();
+ // Define the assistant
+ Assistant assistant =
+ await this.AssistantClient.CreateAssistantAsync(
+ this.Model,
+ enableCodeInterpreter: true,
+ codeInterpreterFileIds: [fileId],
+ metadata: SampleMetadata);
- OpenAIFile uploadFile =
- await fileClient.UploadFileAsync(
- new BinaryData(await EmbeddedResource.ReadAllAsync("sales.csv")!),
- "sales.csv",
- FileUploadPurpose.Assistants);
-
- // Define the agent
- OpenAIAssistantAgent agent =
- await OpenAIAssistantAgent.CreateAsync(
- provider,
- definition: new OpenAIAssistantDefinition(this.Model)
- {
- EnableCodeInterpreter = true,
- CodeInterpreterFileIds = [uploadFile.Id],
- Metadata = AssistantSampleMetadata,
- },
- kernel: new Kernel());
+ // Create the agent
+ OpenAIAssistantAgent agent = new(assistant, this.AssistantClient);
// Create a chat for agent interaction.
AgentGroupChat chat = new();
@@ -50,8 +42,8 @@ await OpenAIAssistantAgent.CreateAsync(
}
finally
{
- await agent.DeleteAsync();
- await fileClient.DeleteFileAsync(uploadFile.Id);
+ await this.AssistantClient.DeleteAssistantAsync(agent.Id);
+ await this.Client.DeleteFileAsync(fileId);
}
// Local function to invoke agent and display the conversation messages.
@@ -64,7 +56,7 @@ async Task InvokeAgentAsync(string input)
await foreach (ChatMessageContent response in chat.InvokeAsync(agent))
{
this.WriteAgentChatMessage(response);
- await this.DownloadResponseContentAsync(fileClient, response);
+ await this.DownloadResponseContentAsync(response);
}
}
}
diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_FunctionFilters.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_FunctionFilters.cs
index c3b3a851bc93..a1493025b5a4 100644
--- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_FunctionFilters.cs
+++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_FunctionFilters.cs
@@ -5,6 +5,7 @@
using Microsoft.SemanticKernel.Agents;
using Microsoft.SemanticKernel.Agents.OpenAI;
using Microsoft.SemanticKernel.ChatCompletion;
+using OpenAI.Assistants;
namespace Agents;
@@ -13,7 +14,7 @@ namespace Agents;
/// filters with
/// via .
///
-public class OpenAIAssistant_FunctionFilters(ITestOutputHelper output) : BaseAgentsTest(output)
+public class OpenAIAssistant_FunctionFilters(ITestOutputHelper output) : BaseAssistantTest(output)
{
[Fact]
public async Task UseFunctionInvocationFilterAsync()
@@ -78,7 +79,7 @@ private async Task InvokeAssistantAsync(OpenAIAssistantAgent agent)
finally
{
await chat.ResetAsync();
- await agent.DeleteAsync();
+ await this.AssistantClient.DeleteAssistantAsync(agent.Id);
}
}
@@ -101,7 +102,7 @@ private async Task InvokeAssistantStreamingAsync(OpenAIAssistantAgent agent)
finally
{
await chat.ResetAsync();
- await agent.DeleteAsync();
+ await this.AssistantClient.DeleteAssistantAsync(agent.Id);
}
}
@@ -118,19 +119,19 @@ private void WriteChatHistory(IEnumerable history)
private async Task CreateAssistantAsync(Kernel kernel)
{
- OpenAIAssistantAgent agent =
- await OpenAIAssistantAgent.CreateAsync(
- this.GetClientProvider(),
- new OpenAIAssistantDefinition(base.Model)
- {
- Instructions = "Answer questions about the menu.",
- Metadata = AssistantSampleMetadata,
- },
- kernel: kernel
- );
-
+ // Define the assistant
+ Assistant assistant =
+ await this.AssistantClient.CreateAssistantAsync(
+ this.Model,
+ instructions: "Answer questions about the menu.",
+ metadata: SampleMetadata);
+
+ // Create the agent
KernelPlugin plugin = KernelPluginFactory.CreateFromType();
- agent.Kernel.Plugins.Add(plugin);
+ OpenAIAssistantAgent agent = new(assistant, this.AssistantClient, [plugin])
+ {
+ Kernel = kernel
+ };
return agent;
}
diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_Streaming.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_Streaming.cs
index 39ff0f0fb97c..493b920f0d9d 100644
--- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_Streaming.cs
+++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_Streaming.cs
@@ -3,35 +3,31 @@
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents.OpenAI;
using Microsoft.SemanticKernel.ChatCompletion;
+using OpenAI.Assistants;
namespace Agents;
///
/// Demonstrate consuming "streaming" message for .
///
-public class OpenAIAssistant_Streaming(ITestOutputHelper output) : BaseAgentsTest(output)
+public class OpenAIAssistant_Streaming(ITestOutputHelper output) : BaseAssistantTest(output)
{
[Fact]
public async Task UseStreamingAssistantAgentAsync()
{
- const string AgentName = "Parrot";
- const string AgentInstructions = "Repeat the user message in the voice of a pirate and then end with a parrot sound.";
-
- // Define the agent
- OpenAIAssistantAgent agent =
- await OpenAIAssistantAgent.CreateAsync(
- kernel: new(),
- clientProvider: this.GetClientProvider(),
- definition: new OpenAIAssistantDefinition(this.Model)
- {
- Instructions = AgentInstructions,
- Name = AgentName,
- EnableCodeInterpreter = true,
- Metadata = AssistantSampleMetadata,
- });
+ // Define the assistant
+ Assistant assistant =
+ await this.AssistantClient.CreateAssistantAsync(
+ this.Model,
+ name: "Parrot",
+ instructions: "Repeat the user message in the voice of a pirate and then end with a parrot sound.",
+ metadata: SampleMetadata);
+
+ // Create the agent
+ OpenAIAssistantAgent agent = new(assistant, this.AssistantClient);
// Create a thread for the agent conversation.
- string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata });
+ string threadId = await this.AssistantClient.CreateThreadAsync(metadata: SampleMetadata);
// Respond to user input
await InvokeAgentAsync(agent, threadId, "Fortune favors the bold.");
@@ -45,27 +41,20 @@ await OpenAIAssistantAgent.CreateAsync(
[Fact]
public async Task UseStreamingAssistantAgentWithPluginAsync()
{
- const string AgentName = "Host";
- const string AgentInstructions = "Answer questions about the menu.";
-
- // Define the agent
- OpenAIAssistantAgent agent =
- await OpenAIAssistantAgent.CreateAsync(
- kernel: new(),
- clientProvider: this.GetClientProvider(),
- definition: new OpenAIAssistantDefinition(this.Model)
- {
- Instructions = AgentInstructions,
- Name = AgentName,
- Metadata = AssistantSampleMetadata,
- });
-
- // Initialize plugin and add to the agent's Kernel (same as direct Kernel usage).
+ // Define the assistant
+ Assistant assistant =
+ await this.AssistantClient.CreateAssistantAsync(
+ this.Model,
+ name: "Host",
+ instructions: "Answer questions about the menu.",
+ metadata: SampleMetadata);
+
+ // Create the agent
KernelPlugin plugin = KernelPluginFactory.CreateFromType();
- agent.Kernel.Plugins.Add(plugin);
+ OpenAIAssistantAgent agent = new(assistant, this.AssistantClient, [plugin]);
// Create a thread for the agent conversation.
- string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata });
+ string threadId = await this.AssistantClient.CreateThreadAsync(metadata: SampleMetadata);
// Respond to user input
await InvokeAgentAsync(agent, threadId, "What is the special soup and its price?");
@@ -78,24 +67,20 @@ await OpenAIAssistantAgent.CreateAsync(
[Fact]
public async Task UseStreamingAssistantWithCodeInterpreterAsync()
{
- const string AgentName = "MathGuy";
- const string AgentInstructions = "Solve math problems with code.";
-
- // Define the agent
- OpenAIAssistantAgent agent =
- await OpenAIAssistantAgent.CreateAsync(
- kernel: new(),
- clientProvider: this.GetClientProvider(),
- definition: new OpenAIAssistantDefinition(this.Model)
- {
- Instructions = AgentInstructions,
- Name = AgentName,
- EnableCodeInterpreter = true,
- Metadata = AssistantSampleMetadata,
- });
+ // Define the assistant
+ Assistant assistant =
+ await this.AssistantClient.CreateAssistantAsync(
+ this.Model,
+ name: "MathGuy",
+ instructions: "Solve math problems with code.",
+ enableCodeInterpreter: true,
+ metadata: SampleMetadata);
+
+ // Create the agent
+ OpenAIAssistantAgent agent = new(assistant, this.AssistantClient);
// Create a thread for the agent conversation.
- string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata });
+ string threadId = await this.AssistantClient.CreateThreadAsync(metadata: SampleMetadata);
// Respond to user input
await InvokeAgentAsync(agent, threadId, "Is 191 a prime number?");
diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_Templating.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_Templating.cs
index 3937635203a4..3fcc8f3d4dd4 100644
--- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_Templating.cs
+++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_Templating.cs
@@ -4,13 +4,14 @@
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.PromptTemplates.Handlebars;
using Microsoft.SemanticKernel.PromptTemplates.Liquid;
+using OpenAI.Assistants;
namespace Agents;
///
/// Demonstrate parameterized template instruction for .
///
-public class OpenAIAssistant_Templating(ITestOutputHelper output) : BaseAgentsTest(output)
+public class OpenAIAssistant_Templating(ITestOutputHelper output) : BaseAssistantTest(output)
{
private readonly static (string Input, string? Style)[] s_inputs =
[
@@ -23,23 +24,25 @@ private readonly static (string Input, string? Style)[] s_inputs =
[Fact]
public async Task InvokeAgentWithInstructionsAsync()
{
- // Instruction based template always proceseed by KernelPromptTemplateFactory
- OpenAIAssistantAgent agent = await OpenAIAssistantAgent.CreateAsync(
- clientProvider: this.GetClientProvider(),
- definition: new OpenAIAssistantDefinition(this.Model)
- {
- Instructions =
- """
- Write a one verse poem on the requested topic in the styles of {{$style}}.
- Always state the requested style of the poem.
- """,
- Metadata = AssistantSampleMetadata
- },
- kernel: new Kernel(),
- defaultArguments: new KernelArguments()
- {
- {"style", "haiku"}
- });
+ // Define the assistant
+ Assistant assistant =
+ await this.AssistantClient.CreateAssistantAsync(
+ this.Model,
+ instructions:
+ """
+ Write a one verse poem on the requested topic in the styles of {{$style}}.
+ Always state the requested style of the poem.
+ """,
+ metadata: SampleMetadata);
+
+ // Create the agent
+ OpenAIAssistantAgent agent = new(assistant, this.AssistantClient)
+ {
+ Arguments =
+ {
+ {"style", "haiku"}
+ },
+ };
await InvokeAssistantAgentWithTemplateAsync(agent);
}
@@ -84,24 +87,27 @@ private async Task InvokeAssistantAgentWithTemplateAsync(
string? templateFormat = null,
IPromptTemplateFactory? templateFactory = null)
{
- // Define the agent
- OpenAIAssistantAgent agent = await OpenAIAssistantAgent.CreateFromTemplateAsync(
- clientProvider: this.GetClientProvider(),
- capabilities: new OpenAIAssistantCapabilities(this.Model)
- {
- Metadata = AssistantSampleMetadata
- },
- kernel: new Kernel(),
- defaultArguments: new KernelArguments()
- {
- {"style", "haiku"}
- },
- templateConfig: new PromptTemplateConfig
- {
- Template = instructionTemplate,
- TemplateFormat = templateFormat,
- },
- templateFactory);
+ PromptTemplateConfig config = new()
+ {
+ Template = instructionTemplate,
+ TemplateFormat = templateFormat,
+ };
+
+ // Define the assistant
+ Assistant assistant =
+ await this.AssistantClient.CreateAssistantFromTemplateAsync(
+ this.Model,
+ config,
+ metadata: SampleMetadata);
+
+ // Create the agent
+ OpenAIAssistantAgent agent = new(assistant, this.AssistantClient, plugins: null, config, templateFactory)
+ {
+ Arguments =
+ {
+ {"style", "haiku"}
+ },
+ };
await InvokeAssistantAgentWithTemplateAsync(agent);
}
@@ -109,7 +115,7 @@ private async Task InvokeAssistantAgentWithTemplateAsync(
private async Task InvokeAssistantAgentWithTemplateAsync(OpenAIAssistantAgent agent)
{
// Create a thread for the agent conversation.
- string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata });
+ string threadId = await this.AssistantClient.CreateThreadAsync(metadata: SampleMetadata);
try
{
@@ -135,8 +141,8 @@ private async Task InvokeAssistantAgentWithTemplateAsync(OpenAIAssistantAgent ag
}
finally
{
- await agent.DeleteThreadAsync(threadId);
- await agent.DeleteAsync();
+ await this.AssistantClient.DeleteThreadAsync(threadId);
+ await this.AssistantClient.DeleteAssistantAsync(agent.Id);
}
}
}
diff --git a/dotnet/samples/Concepts/Resources/Plugins/LegacyMenuPlugin.cs b/dotnet/samples/Concepts/Resources/Plugins/LegacyMenuPlugin.cs
deleted file mode 100644
index c383ea9025f1..000000000000
--- a/dotnet/samples/Concepts/Resources/Plugins/LegacyMenuPlugin.cs
+++ /dev/null
@@ -1,50 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using System.ComponentModel;
-using Microsoft.SemanticKernel;
-
-namespace Plugins;
-
-public sealed class LegacyMenuPlugin
-{
- ///
- /// Returns a mock item menu.
- ///
- [KernelFunction, Description("Provides a list of specials from the menu.")]
- [System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1024:Use properties where appropriate", Justification = "Too smart")]
- public string[] GetSpecials(KernelArguments? arguments)
- {
- return
- [
- "Special Soup: Clam Chowder",
- "Special Salad: Cobb Salad",
- "Special Drink: Chai Tea",
- ];
- }
-
- ///
- /// Returns a mock item price.
- ///
- [KernelFunction, Description("Provides the price of the requested menu item.")]
- public string GetItemPrice(
- [Description("The name of the menu item.")]
- string menuItem,
- KernelArguments? arguments)
- {
- return "$9.99";
- }
-
- ///
- /// An item is 86'd when the kitchen cannot serve due to running out of ingredients.
- ///
- [KernelFunction, Description("Returns true if the kitchen has ran out of the item.")]
- public bool IsItem86d(
- [Description("The name of the menu item.")]
- string menuItem,
- [Description("The number of items requested.")]
- int count,
- KernelArguments? arguments)
- {
- return count < 3;
- }
-}
diff --git a/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step01_AzureAIAgent.cs b/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step01_AzureAIAgent.cs
index 8c7b049e1107..1f7c0a18f8a3 100644
--- a/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step01_AzureAIAgent.cs
+++ b/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step01_AzureAIAgent.cs
@@ -11,7 +11,7 @@ namespace GettingStarted.AzureAgents;
/// This example demonstrates similarity between using
/// and other agent types.
///
-public class Step01_AzureAIAgent(ITestOutputHelper output) : BaseAgentsTest(output)
+public class Step01_AzureAIAgent(ITestOutputHelper output) : BaseAzureAgentTest(output)
{
[Fact]
public async Task UseTemplateForAzureAgentAsync()
@@ -20,14 +20,11 @@ public async Task UseTemplateForAzureAgentAsync()
string generateStoryYaml = EmbeddedResource.Read("GenerateStory.yaml");
PromptTemplateConfig templateConfig = KernelFunctionYaml.ToPromptTemplateConfig(generateStoryYaml);
- AzureAIClientProvider clientProvider = this.GetAzureProvider();
- AgentsClient client = clientProvider.Client.GetAgentsClient();
- Agent definition = await client.CreateAgentAsync("gpt-4o", templateConfig.Name, templateConfig.Description, templateConfig.Template);
+ Agent definition = await this.AgentsClient.CreateAgentAsync("gpt-4o", templateConfig.Name, templateConfig.Description, templateConfig.Template);
// Instructions, Name and Description properties defined via the config.
- AzureAIAgent agent = new(definition, clientProvider)
+ AzureAIAgent agent = new(definition, this.AgentsClient)
{
- Kernel = new Kernel(),
- Arguments = new KernelArguments()
+ Arguments =
{
{ "topic", "Dog" },
{ "length", "3" },
@@ -35,7 +32,7 @@ public async Task UseTemplateForAzureAgentAsync()
};
// Create a thread for the agent conversation.
- AgentThread thread = await client.CreateThreadAsync(metadata: AssistantSampleMetadata);
+ AgentThread thread = await this.AgentsClient.CreateThreadAsync(metadata: SampleMetadata);
try
{
@@ -52,8 +49,8 @@ await InvokeAgentAsync(
}
finally
{
- await client.DeleteThreadAsync(thread.Id);
- await client.DeleteAgentAsync(agent.Id);
+ await this.AgentsClient.DeleteThreadAsync(thread.Id);
+ await this.AgentsClient.DeleteAgentAsync(agent.Id);
}
// Local function to invoke agent and display the response.
diff --git a/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step02_AzureAIAgent_Plugins.cs b/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step02_AzureAIAgent_Plugins.cs
index fdebd13e5a65..4754acb92ff4 100644
--- a/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step02_AzureAIAgent_Plugins.cs
+++ b/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step02_AzureAIAgent_Plugins.cs
@@ -12,17 +12,8 @@ namespace GettingStarted.AzureAgents;
/// Demonstrate creation of with a ,
/// and then eliciting its response to explicit user messages.
///
-public class Step02_AzureAIAgent_Plugins : BaseAgentsTest
+public class Step02_AzureAIAgent_Plugins(ITestOutputHelper output) : BaseAzureAgentTest(output)
{
- private readonly AzureAIClientProvider _clientProvider;
- private readonly AgentsClient _client;
-
- public Step02_AzureAIAgent_Plugins(ITestOutputHelper output) : base(output)
- {
- this._clientProvider = this.GetAzureProvider();
- this._client = this._clientProvider.Client.GetAgentsClient();
- }
-
[Fact]
public async Task UseAzureAgentWithPluginAsync()
{
@@ -33,7 +24,7 @@ public async Task UseAzureAgentWithPluginAsync()
name: "Host");
// Create a thread for the agent conversation.
- AgentThread thread = await this._client.CreateThreadAsync(metadata: AssistantSampleMetadata);
+ AgentThread thread = await this.AgentsClient.CreateThreadAsync(metadata: SampleMetadata);
// Respond to user input
try
@@ -45,8 +36,8 @@ public async Task UseAzureAgentWithPluginAsync()
}
finally
{
- await this._client.DeleteThreadAsync(thread.Id);
- await this._client.DeleteAgentAsync(agent.Id);
+ await this.AgentsClient.DeleteThreadAsync(thread.Id);
+ await this.AgentsClient.DeleteAgentAsync(agent.Id);
}
}
@@ -57,7 +48,7 @@ public async Task UseAzureAgentWithPluginEnumParameterAsync()
AzureAIAgent agent = await CreateAzureAgentAsync(plugin: KernelPluginFactory.CreateFromType());
// Create a thread for the agent conversation.
- AgentThread thread = await this._client.CreateThreadAsync(metadata: AssistantSampleMetadata);
+ AgentThread thread = await this.AgentsClient.CreateThreadAsync(metadata: SampleMetadata);
// Respond to user input
try
@@ -66,21 +57,21 @@ public async Task UseAzureAgentWithPluginEnumParameterAsync()
}
finally
{
- await this._client.DeleteThreadAsync(thread.Id);
- await this._client.DeleteAgentAsync(agent.Id);
+ await this.AgentsClient.DeleteThreadAsync(thread.Id);
+ await this.AgentsClient.DeleteAgentAsync(agent.Id);
}
}
private async Task CreateAzureAgentAsync(KernelPlugin plugin, string? instructions = null, string? name = null)
{
// Define the agent
- Agent definition = await this._client.CreateAgentAsync(
+ Agent definition = await this.AgentsClient.CreateAgentAsync(
TestConfiguration.AzureAI.ChatModelId,
name,
null,
instructions);
- AzureAIAgent agent = new(definition, _clientProvider)
+ AzureAIAgent agent = new(definition, this.AgentsClient)
{
Kernel = new Kernel(),
};
diff --git a/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step03_AzureAIAgent_Chat.cs b/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step03_AzureAIAgent_Chat.cs
index 3e2f58c55bf3..c71b7124b463 100644
--- a/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step03_AzureAIAgent_Chat.cs
+++ b/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step03_AzureAIAgent_Chat.cs
@@ -1,5 +1,4 @@
// Copyright (c) Microsoft. All rights reserved.
-using Azure.AI.Projects;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents;
using Microsoft.SemanticKernel.Agents.AzureAI;
@@ -14,7 +13,7 @@ namespace GettingStarted.AzureAgents;
/// that inform how chat proceeds with regards to: Agent selection, chat continuation, and maximum
/// number of agent interactions.
///
-public class Step03_AzureAIAgent_Chat(ITestOutputHelper output) : BaseAgentsTest(output)
+public class Step03_AzureAIAgent_Chat(ITestOutputHelper output) : BaseAzureAgentTest(output)
{
private const string ReviewerName = "ArtDirector";
private const string ReviewerInstructions =
@@ -40,20 +39,18 @@ Consider suggestions when refining an idea.
public async Task UseGroupChatWithTwoAgentsAsync()
{
// Define the agents
- AzureAIClientProvider clientProvider = this.GetAzureProvider();
- AgentsClient client = clientProvider.Client.GetAgentsClient();
- Agent reviewerModel = await client.CreateAgentAsync(
+ Agent reviewerModel = await this.AgentsClient.CreateAgentAsync(
TestConfiguration.AzureAI.ChatModelId,
ReviewerName,
null,
ReviewerInstructions);
- AzureAIAgent agentReviewer = new(reviewerModel, clientProvider);
- Agent writerModel = await client.CreateAgentAsync(
+ AzureAIAgent agentReviewer = new(reviewerModel, this.AgentsClient);
+ Agent writerModel = await this.AgentsClient.CreateAgentAsync(
TestConfiguration.AzureAI.ChatModelId,
CopyWriterName,
null,
CopyWriterInstructions);
- AzureAIAgent agentWriter = new(writerModel, clientProvider);
+ AzureAIAgent agentWriter = new(writerModel, this.AgentsClient);
// Create a chat for agent interaction.
AgentGroupChat chat =
diff --git a/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step04_AzureAIAgent_CodeInterpreter.cs b/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step04_AzureAIAgent_CodeInterpreter.cs
index eb91810100f4..551951a81a49 100644
--- a/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step04_AzureAIAgent_CodeInterpreter.cs
+++ b/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step04_AzureAIAgent_CodeInterpreter.cs
@@ -10,24 +10,22 @@ namespace GettingStarted.AzureAgents;
///
/// Demonstrate using code-interpreter on .
///
-public class Step04_AzureAIAgent_CodeInterpreter(ITestOutputHelper output) : BaseAgentsTest(output)
+public class Step04_AzureAIAgent_CodeInterpreter(ITestOutputHelper output) : BaseAzureAgentTest(output)
{
[Fact]
public async Task UseCodeInterpreterToolWithAgentAsync()
{
// Define the agent
- AzureAIClientProvider clientProvider = this.GetAzureProvider();
- AgentsClient client = clientProvider.Client.GetAgentsClient();
- Agent definition = await client.CreateAgentAsync(
+ Agent definition = await this.AgentsClient.CreateAgentAsync(
TestConfiguration.AzureAI.ChatModelId,
tools: [new CodeInterpreterToolDefinition()]);
- AzureAIAgent agent = new(definition, clientProvider)
+ AzureAIAgent agent = new(definition, this.AgentsClient)
{
Kernel = new Kernel(),
};
// Create a thread for the agent conversation.
- AgentThread thread = await client.CreateThreadAsync(metadata: AssistantSampleMetadata);
+ AgentThread thread = await this.AgentsClient.CreateThreadAsync(metadata: SampleMetadata);
// Respond to user input
try
@@ -36,8 +34,8 @@ public async Task UseCodeInterpreterToolWithAgentAsync()
}
finally
{
- await client.DeleteThreadAsync(thread.Id);
- await client.DeleteAgentAsync(agent.Id);
+ await this.AgentsClient.DeleteThreadAsync(thread.Id);
+ await this.AgentsClient.DeleteAgentAsync(agent.Id);
}
// Local function to invoke agent and display the conversation messages.
diff --git a/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step05_AzureAIAgent_FileSearch.cs b/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step05_AzureAIAgent_FileSearch.cs
index 3cb26df247d6..dba8ff1264dd 100644
--- a/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step05_AzureAIAgent_FileSearch.cs
+++ b/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step05_AzureAIAgent_FileSearch.cs
@@ -11,7 +11,7 @@ namespace GettingStarted.AzureAgents;
///
/// Demonstrate using code-interpreter on .
///
-public class Step05_AzureAIAgent_FileSearch(ITestOutputHelper output) : BaseAgentsTest(output)
+public class Step05_AzureAIAgent_FileSearch(ITestOutputHelper output) : BaseAzureAgentTest(output)
{
[Fact]
public async Task UseFileSearchToolWithAgentAsync()
@@ -19,14 +19,12 @@ public async Task UseFileSearchToolWithAgentAsync()
// Define the agent
await using Stream stream = EmbeddedResource.ReadStream("employees.pdf")!;
- AzureAIClientProvider clientProvider = this.GetAzureProvider();
- AgentsClient client = clientProvider.Client.GetAgentsClient();
- AgentFile fileInfo = await client.UploadFileAsync(stream, AgentFilePurpose.Agents, "employees.pdf");
+ AgentFile fileInfo = await this.AgentsClient.UploadFileAsync(stream, AgentFilePurpose.Agents, "employees.pdf");
VectorStore fileStore =
- await client.CreateVectorStoreAsync(
+ await this.AgentsClient.CreateVectorStoreAsync(
[fileInfo.Id],
- metadata: new Dictionary() { { AssistantSampleMetadataKey, bool.TrueString } });
- Agent agentModel = await client.CreateAgentAsync(
+ metadata: new Dictionary() { { SampleMetadataKey, bool.TrueString } });
+ Agent agentModel = await this.AgentsClient.CreateAgentAsync(
TestConfiguration.AzureAI.ChatModelId,
tools: [new FileSearchToolDefinition()],
toolResources: new()
@@ -36,11 +34,11 @@ await client.CreateVectorStoreAsync(
VectorStoreIds = { fileStore.Id },
}
},
- metadata: new Dictionary() { { AssistantSampleMetadataKey, bool.TrueString } });
- AzureAIAgent agent = new(agentModel, clientProvider);
+ metadata: new Dictionary() { { SampleMetadataKey, bool.TrueString } });
+ AzureAIAgent agent = new(agentModel, this.AgentsClient);
// Create a thread associated for the agent conversation.
- AgentThread thread = await client.CreateThreadAsync(metadata: AssistantSampleMetadata);
+ AgentThread thread = await this.AgentsClient.CreateThreadAsync(metadata: SampleMetadata);
// Respond to user input
try
@@ -51,10 +49,10 @@ await client.CreateVectorStoreAsync(
}
finally
{
- await client.DeleteThreadAsync(thread.Id);
- await client.DeleteAgentAsync(agent.Id);
- await client.DeleteVectorStoreAsync(fileStore.Id);
- await client.DeleteFileAsync(fileInfo.Id);
+ await this.AgentsClient.DeleteThreadAsync(thread.Id);
+ await this.AgentsClient.DeleteAgentAsync(agent.Id);
+ await this.AgentsClient.DeleteVectorStoreAsync(fileStore.Id);
+ await this.AgentsClient.DeleteFileAsync(fileInfo.Id);
}
// Local function to invoke agent and display the conversation messages.
diff --git a/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step06_AzureAIAgent_OpenAPI.cs b/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step06_AzureAIAgent_OpenAPI.cs
index 19ff5cd89865..54019df77be4 100644
--- a/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step06_AzureAIAgent_OpenAPI.cs
+++ b/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step06_AzureAIAgent_OpenAPI.cs
@@ -15,7 +15,7 @@ namespace GettingStarted.AzureAgents;
/// Note: Open API invocation does not involve kernel function calling or kernel filters.
/// Azure Function invocation is managed entirely by the Azure AI Agent service.
///
-public class Step06_AzureAIAgent_OpenAPI(ITestOutputHelper output) : BaseAgentsTest(output)
+public class Step06_AzureAIAgent_OpenAPI(ITestOutputHelper output) : BaseAzureAgentTest(output)
{
[Fact]
public async Task UseOpenAPIToolWithAgentAsync()
@@ -25,22 +25,20 @@ public async Task UseOpenAPIToolWithAgentAsync()
string apiWeather = EmbeddedResource.Read("weather.json");
// Define the agent
- AzureAIClientProvider clientProvider = this.GetAzureProvider();
- AgentsClient client = clientProvider.Client.GetAgentsClient();
- Agent definition = await client.CreateAgentAsync(
+ Agent definition = await this.AgentsClient.CreateAgentAsync(
TestConfiguration.AzureAI.ChatModelId,
tools:
[
new OpenApiToolDefinition("RestCountries", "Retrieve country information", BinaryData.FromString(apiCountries), new OpenApiAnonymousAuthDetails()),
new OpenApiToolDefinition("Weather", "Retrieve weather by location", BinaryData.FromString(apiWeather), new OpenApiAnonymousAuthDetails())
]);
- AzureAIAgent agent = new(definition, clientProvider)
+ AzureAIAgent agent = new(definition, this.AgentsClient)
{
Kernel = new Kernel(),
};
// Create a thread for the agent conversation.
- AgentThread thread = await client.CreateThreadAsync(metadata: AssistantSampleMetadata);
+ AgentThread thread = await this.AgentsClient.CreateThreadAsync(metadata: SampleMetadata);
// Respond to user input
try
@@ -50,8 +48,8 @@ public async Task UseOpenAPIToolWithAgentAsync()
}
finally
{
- await client.DeleteThreadAsync(thread.Id);
- await client.DeleteAgentAsync(agent.Id);
+ await this.AgentsClient.DeleteThreadAsync(thread.Id);
+ await this.AgentsClient.DeleteAgentAsync(agent.Id);
}
// Local function to invoke agent and display the conversation messages.
diff --git a/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step07_AzureAIAgent_Functions.cs b/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step07_AzureAIAgent_Functions.cs
index 9364048c67b8..f4ca77e75c5e 100644
--- a/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step07_AzureAIAgent_Functions.cs
+++ b/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step07_AzureAIAgent_Functions.cs
@@ -13,7 +13,7 @@ namespace GettingStarted.AzureAgents;
/// when the agent is created. This is useful if you want to retrieve the agent later and
/// then dynamically check what function tools it requires.
///
-public class Step07_AzureAIAgent_Functions(ITestOutputHelper output) : BaseAgentsTest(output)
+public class Step07_AzureAIAgent_Functions(ITestOutputHelper output) : BaseAzureAgentTest(output)
{
private const string HostName = "Host";
private const string HostInstructions = "Answer questions about the menu.";
@@ -22,22 +22,19 @@ public class Step07_AzureAIAgent_Functions(ITestOutputHelper output) : BaseAgent
public async Task UseSingleAgentWithFunctionToolsAsync()
{
// Define the agent
- AzureAIClientProvider clientProvider = this.GetAzureProvider();
- AgentsClient client = clientProvider.Client.GetAgentsClient();
-
// In this sample the function tools are added to the agent this is
// important if you want to retrieve the agent later and then dynamically check
// what function tools it requires.
KernelPlugin plugin = KernelPluginFactory.CreateFromType();
var tools = plugin.Select(f => f.ToToolDefinition(plugin.Name));
- Agent definition = await client.CreateAgentAsync(
+ Agent definition = await this.AgentsClient.CreateAgentAsync(
model: TestConfiguration.AzureAI.ChatModelId,
name: HostName,
description: null,
instructions: HostInstructions,
tools: tools);
- AzureAIAgent agent = new(definition, clientProvider)
+ AzureAIAgent agent = new(definition, this.AgentsClient)
{
Kernel = new Kernel(),
};
@@ -46,7 +43,7 @@ public async Task UseSingleAgentWithFunctionToolsAsync()
agent.Kernel.Plugins.Add(plugin);
// Create a thread for the agent conversation.
- AgentThread thread = await client.CreateThreadAsync(metadata: AssistantSampleMetadata);
+ AgentThread thread = await this.AgentsClient.CreateThreadAsync(metadata: SampleMetadata);
// Respond to user input
try
@@ -58,8 +55,8 @@ public async Task UseSingleAgentWithFunctionToolsAsync()
}
finally
{
- await client.DeleteThreadAsync(thread.Id);
- await client.DeleteAgentAsync(agent.Id);
+ await this.AgentsClient.DeleteThreadAsync(thread.Id);
+ await this.AgentsClient.DeleteAgentAsync(agent.Id);
}
// Local function to invoke agent and display the conversation messages.
diff --git a/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step01_Assistant.cs b/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step01_Assistant.cs
index 28672070bdd5..0196bdd122e9 100644
--- a/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step01_Assistant.cs
+++ b/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step01_Assistant.cs
@@ -1,15 +1,15 @@
// Copyright (c) Microsoft. All rights reserved.
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents.OpenAI;
+using OpenAI.Assistants;
using Resources;
namespace GettingStarted.OpenAIAssistants;
///
-/// This example demonstrates similarity between using
-/// and other agent types.
+/// This example demonstrates using with templatized instructions.
///
-public class Step01_Assistant(ITestOutputHelper output) : BaseAgentsTest(output)
+public class Step01_Assistant(ITestOutputHelper output) : BaseAssistantTest(output)
{
[Fact]
public async Task UseTemplateForAssistantAgentAsync()
@@ -19,23 +19,18 @@ public async Task UseTemplateForAssistantAgentAsync()
PromptTemplateConfig templateConfig = KernelFunctionYaml.ToPromptTemplateConfig(generateStoryYaml);
// Instructions, Name and Description properties defined via the config.
- OpenAIAssistantAgent agent =
- await OpenAIAssistantAgent.CreateFromTemplateAsync(
- clientProvider: this.GetClientProvider(),
- capabilities: new OpenAIAssistantCapabilities(this.Model)
- {
- Metadata = AssistantSampleMetadata,
- },
- kernel: new Kernel(),
- defaultArguments: new KernelArguments()
- {
- { "topic", "Dog" },
- { "length", "3" },
- },
- templateConfig);
+ Assistant definition = await this.AssistantClient.CreateAssistantFromTemplateAsync(this.Model, templateConfig, metadata: SampleMetadata);
+ OpenAIAssistantAgent agent = new(definition, this.AssistantClient)
+ {
+ Arguments =
+ {
+ { "topic", "Dog" },
+ { "length", "3" },
+ },
+ };
// Create a thread for the agent conversation.
- string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata });
+ string threadId = await this.AssistantClient.CreateThreadAsync(metadata: SampleMetadata);
try
{
@@ -52,8 +47,8 @@ await InvokeAgentAsync(
}
finally
{
- await agent.DeleteThreadAsync(threadId);
- await agent.DeleteAsync();
+ await this.AssistantClient.DeleteThreadAsync(threadId);
+ await this.AssistantClient.DeleteAssistantAsync(agent.Id);
}
// Local function to invoke agent and display the response.
diff --git a/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step02_Assistant_Plugins.cs b/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step02_Assistant_Plugins.cs
index 8778bf982959..3eb893a8871e 100644
--- a/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step02_Assistant_Plugins.cs
+++ b/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step02_Assistant_Plugins.cs
@@ -2,6 +2,7 @@
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents.OpenAI;
using Microsoft.SemanticKernel.ChatCompletion;
+using OpenAI.Assistants;
using Plugins;
namespace GettingStarted.OpenAIAssistants;
@@ -10,7 +11,7 @@ namespace GettingStarted.OpenAIAssistants;
/// Demonstrate creation of with a ,
/// and then eliciting its response to explicit user messages.
///
-public class Step02_Assistant_Plugins(ITestOutputHelper output) : BaseAgentsTest(output)
+public class Step02_Assistant_Plugins(ITestOutputHelper output) : BaseAssistantTest(output)
{
[Fact]
public async Task UseAssistantWithPluginAsync()
@@ -22,7 +23,7 @@ public async Task UseAssistantWithPluginAsync()
name: "Host");
// Create a thread for the agent conversation.
- string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata });
+ string threadId = await this.AssistantClient.CreateThreadAsync(metadata: SampleMetadata);
// Respond to user input
try
@@ -34,8 +35,8 @@ public async Task UseAssistantWithPluginAsync()
}
finally
{
- await agent.DeleteThreadAsync(threadId);
- await agent.DeleteAsync();
+ await this.AssistantClient.DeleteThreadAsync(threadId);
+ await this.AssistantClient.DeleteAssistantAsync(agent.Id);
}
}
@@ -46,7 +47,7 @@ public async Task UseAssistantWithPluginEnumParameterAsync()
OpenAIAssistantAgent agent = await CreateAssistantAgentAsync(plugin: KernelPluginFactory.CreateFromType());
// Create a thread for the agent conversation.
- string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata });
+ string threadId = await this.AssistantClient.CreateThreadAsync(metadata: SampleMetadata);
// Respond to user input
try
@@ -55,27 +56,23 @@ public async Task UseAssistantWithPluginEnumParameterAsync()
}
finally
{
- await agent.DeleteThreadAsync(threadId);
- await agent.DeleteAsync();
+ await this.AssistantClient.DeleteThreadAsync(threadId);
+ await this.AssistantClient.DeleteAssistantAsync(agent.Id);
}
}
private async Task CreateAssistantAgentAsync(KernelPlugin plugin, string? instructions = null, string? name = null)
{
- // Create the agent
- OpenAIAssistantAgent agent =
- await OpenAIAssistantAgent.CreateAsync(
- clientProvider: this.GetClientProvider(),
- definition: new OpenAIAssistantDefinition(this.Model)
- {
- Instructions = instructions,
- Name = name,
- Metadata = AssistantSampleMetadata,
- },
- kernel: new Kernel());
+ // Define the assistant
+ Assistant assistant =
+ await this.AssistantClient.CreateAssistantAsync(
+ this.Model,
+ name,
+ instructions: instructions,
+ metadata: SampleMetadata);
- // Add to the agent's Kernel
- agent.Kernel.Plugins.Add(plugin);
+ // Create the agent
+ OpenAIAssistantAgent agent = new(assistant, this.AssistantClient, [plugin]);
return agent;
}
diff --git a/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step03_Assistant_Vision.cs b/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step03_Assistant_Vision.cs
index a06c7ef8a97f..a9d8f5ead9e0 100644
--- a/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step03_Assistant_Vision.cs
+++ b/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step03_Assistant_Vision.cs
@@ -2,6 +2,7 @@
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents.OpenAI;
using Microsoft.SemanticKernel.ChatCompletion;
+using OpenAI.Assistants;
using Resources;
namespace GettingStarted.OpenAIAssistants;
@@ -9,7 +10,7 @@ namespace GettingStarted.OpenAIAssistants;
///
/// Demonstrate providing image input to .
///
-public class Step03_Assistant_Vision(ITestOutputHelper output) : BaseAgentsTest(output)
+public class Step03_Assistant_Vision(ITestOutputHelper output) : BaseAssistantTest(output)
{
///
/// Azure currently only supports message of type=text.
@@ -19,23 +20,21 @@ public class Step03_Assistant_Vision(ITestOutputHelper output) : BaseAgentsTest(
[Fact]
public async Task UseImageContentWithAssistantAsync()
{
- // Define the agent
- OpenAIClientProvider provider = this.GetClientProvider();
- OpenAIAssistantAgent agent =
- await OpenAIAssistantAgent.CreateAsync(
- provider,
- definition: new OpenAIAssistantDefinition(this.Model)
- {
- Metadata = AssistantSampleMetadata,
- },
- kernel: new Kernel());
+ // Define the assistant
+ Assistant assistant =
+ await this.AssistantClient.CreateAssistantAsync(
+ this.Model,
+ metadata: SampleMetadata);
+
+ // Create the agent
+ OpenAIAssistantAgent agent = new(assistant, this.AssistantClient);
// Upload an image
await using Stream imageStream = EmbeddedResource.ReadStream("cat.jpg")!;
- string fileId = await agent.UploadFileAsync(imageStream, "cat.jpg");
+ string fileId = await this.Client.UploadAssistantFileAsync(imageStream, "cat.jpg");
// Create a thread for the agent conversation.
- string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata });
+ string threadId = await this.AssistantClient.CreateThreadAsync(metadata: SampleMetadata);
// Respond to user input
try
@@ -48,9 +47,9 @@ await OpenAIAssistantAgent.CreateAsync(
}
finally
{
- await agent.DeleteThreadAsync(threadId);
- await agent.DeleteAsync();
- await provider.Client.GetOpenAIFileClient().DeleteFileAsync(fileId);
+ await this.AssistantClient.DeleteThreadAsync(threadId);
+ await this.AssistantClient.DeleteAssistantAsync(agent.Id);
+ await this.Client.DeleteFileAsync(fileId);
}
// Local function to invoke agent and display the conversation messages.
diff --git a/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step04_AssistantTool_CodeInterpreter.cs b/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step04_AssistantTool_CodeInterpreter.cs
index e52cc0221074..3de017d422a3 100644
--- a/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step04_AssistantTool_CodeInterpreter.cs
+++ b/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step04_AssistantTool_CodeInterpreter.cs
@@ -2,30 +2,30 @@
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents.OpenAI;
using Microsoft.SemanticKernel.ChatCompletion;
+using OpenAI.Assistants;
namespace GettingStarted.OpenAIAssistants;
///
/// Demonstrate using code-interpreter on .
///
-public class Step04_AssistantTool_CodeInterpreter(ITestOutputHelper output) : BaseAgentsTest(output)
+public class Step04_AssistantTool_CodeInterpreter(ITestOutputHelper output) : BaseAssistantTest(output)
{
[Fact]
public async Task UseCodeInterpreterToolWithAssistantAgentAsync()
{
- // Define the agent
- OpenAIAssistantAgent agent =
- await OpenAIAssistantAgent.CreateAsync(
- clientProvider: this.GetClientProvider(),
- definition: new(this.Model)
- {
- EnableCodeInterpreter = true,
- Metadata = AssistantSampleMetadata,
- },
- kernel: new Kernel());
+ // Define the assistant
+ Assistant assistant =
+ await this.AssistantClient.CreateAssistantAsync(
+ this.Model,
+ enableCodeInterpreter: true,
+ metadata: SampleMetadata);
+
+ // Create the agent
+ OpenAIAssistantAgent agent = new(assistant, this.AssistantClient);
// Create a thread for the agent conversation.
- string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata });
+ string threadId = await this.AssistantClient.CreateThreadAsync(metadata: SampleMetadata);
// Respond to user input
try
@@ -34,8 +34,8 @@ await OpenAIAssistantAgent.CreateAsync(
}
finally
{
- await agent.DeleteThreadAsync(threadId);
- await agent.DeleteAsync();
+ await this.AssistantClient.DeleteThreadAsync(threadId);
+ await this.AssistantClient.DeleteAssistantAsync(agent.Id);
}
// Local function to invoke agent and display the conversation messages.
diff --git a/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step05_AssistantTool_FileSearch.cs b/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step05_AssistantTool_FileSearch.cs
index 02af1f1aec17..361c9c0621e9 100644
--- a/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step05_AssistantTool_FileSearch.cs
+++ b/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step05_AssistantTool_FileSearch.cs
@@ -3,8 +3,7 @@
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents.OpenAI;
using Microsoft.SemanticKernel.ChatCompletion;
-using OpenAI.Files;
-using OpenAI.VectorStores;
+using OpenAI.Assistants;
using Resources;
namespace GettingStarted.OpenAIAssistants;
@@ -12,46 +11,36 @@ namespace GettingStarted.OpenAIAssistants;
///
/// Demonstrate using code-interpreter on .
///
-public class Step05_AssistantTool_FileSearch(ITestOutputHelper output) : BaseAgentsTest(output)
+public class Step05_AssistantTool_FileSearch(ITestOutputHelper output) : BaseAssistantTest(output)
{
[Fact]
public async Task UseFileSearchToolWithAssistantAgentAsync()
{
- // Define the agent
- OpenAIClientProvider provider = this.GetClientProvider();
- OpenAIAssistantAgent agent =
- await OpenAIAssistantAgent.CreateAsync(
- clientProvider: this.GetClientProvider(),
- definition: new OpenAIAssistantDefinition(this.Model)
- {
- EnableFileSearch = true,
- Metadata = AssistantSampleMetadata,
- },
- kernel: new Kernel());
+ // Define the assistant
+ Assistant assistant =
+ await this.AssistantClient.CreateAssistantAsync(
+ this.Model,
+ enableFileSearch: true,
+ metadata: SampleMetadata);
+
+ // Create the agent
+ OpenAIAssistantAgent agent = new(assistant, this.AssistantClient);
// Upload file - Using a table of fictional employees.
- OpenAIFileClient fileClient = provider.Client.GetOpenAIFileClient();
await using Stream stream = EmbeddedResource.ReadStream("employees.pdf")!;
- OpenAIFile fileInfo = await fileClient.UploadFileAsync(stream, "employees.pdf", FileUploadPurpose.Assistants);
+ string fileId = await this.Client.UploadAssistantFileAsync(stream, "employees.pdf");
// Create a vector-store
- VectorStoreClient vectorStoreClient = provider.Client.GetVectorStoreClient();
- CreateVectorStoreOperation result =
- await vectorStoreClient.CreateVectorStoreAsync(waitUntilCompleted: false,
- new VectorStoreCreationOptions()
- {
- FileIds = { fileInfo.Id },
- Metadata = { { AssistantSampleMetadataKey, bool.TrueString } }
- });
+ string vectorStoreId =
+ await this.Client.CreateVectorStoreAsync(
+ [fileId],
+ waitUntilCompleted: true,
+ metadata: SampleMetadata);
// Create a thread associated with a vector-store for the agent conversation.
- string threadId =
- await agent.CreateThreadAsync(
- new OpenAIThreadCreationOptions
- {
- VectorStoreId = result.VectorStoreId,
- Metadata = AssistantSampleMetadata,
- });
+ string threadId = await this.AssistantClient.CreateThreadAsync(
+ vectorStoreId: vectorStoreId,
+ metadata: SampleMetadata);
// Respond to user input
try
@@ -62,10 +51,10 @@ await agent.CreateThreadAsync(
}
finally
{
- await agent.DeleteThreadAsync(threadId);
- await agent.DeleteAsync();
- await vectorStoreClient.DeleteVectorStoreAsync(result.VectorStoreId);
- await fileClient.DeleteFileAsync(fileInfo.Id);
+ await this.AssistantClient.DeleteThreadAsync(threadId);
+ await this.AssistantClient.DeleteAssistantAsync(agent.Id);
+ await this.Client.DeleteVectorStoreAsync(vectorStoreId);
+ await this.Client.DeleteFileAsync(fileId);
}
// Local function to invoke agent and display the conversation messages.
diff --git a/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step06_AssistantTool_Function.cs b/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step06_AssistantTool_Function.cs
index 7904eec2c0ce..024f8ab167ae 100644
--- a/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step06_AssistantTool_Function.cs
+++ b/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step06_AssistantTool_Function.cs
@@ -1,5 +1,4 @@
// Copyright (c) Microsoft. All rights reserved.
-
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents.OpenAI;
using Microsoft.SemanticKernel.ChatCompletion;
@@ -13,7 +12,7 @@ namespace GettingStarted.OpenAIAssistants;
/// when the assistant is created. This is useful if you want to retrieve the assistant later and
/// then dynamically check what function tools it requires.
///
-public class Step06_AssistantTool_Function(ITestOutputHelper output) : BaseAgentsTest(output)
+public class Step06_AssistantTool_Function(ITestOutputHelper output) : BaseAssistantTest(output)
{
private const string HostName = "Host";
private const string HostInstructions = "Answer questions about the menu.";
@@ -22,8 +21,6 @@ public class Step06_AssistantTool_Function(ITestOutputHelper output) : BaseAgent
public async Task UseSingleAssistantWithFunctionToolsAsync()
{
// Define the agent
- OpenAIClientProvider provider = this.GetClientProvider();
- AssistantClient client = provider.Client.GetAssistantClient();
AssistantCreationOptions creationOptions =
new()
{
@@ -31,8 +28,8 @@ public async Task UseSingleAssistantWithFunctionToolsAsync()
Instructions = HostInstructions,
Metadata =
{
- { AssistantSampleMetadataKey, bool.TrueString }
- }
+ { SampleMetadataKey, bool.TrueString }
+ },
};
// In this sample the function tools are added to the assistant this is
@@ -41,18 +38,14 @@ public async Task UseSingleAssistantWithFunctionToolsAsync()
KernelPlugin plugin = KernelPluginFactory.CreateFromType();
plugin.Select(f => f.ToToolDefinition(plugin.Name)).ToList().ForEach(td => creationOptions.Tools.Add(td));
- OpenAIAssistantAgent agent =
- await OpenAIAssistantAgent.CreateAsync(
- clientProvider: this.GetClientProvider(),
- modelId: this.Model,
- creationOptions: creationOptions,
- kernel: new Kernel());
+ Assistant definition = await this.AssistantClient.CreateAssistantAsync(this.Model, creationOptions);
+ OpenAIAssistantAgent agent = new(definition, this.AssistantClient);
// Add plugin to the agent's Kernel (same as direct Kernel usage).
agent.Kernel.Plugins.Add(plugin);
// Create a thread for the agent conversation.
- string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata });
+ string threadId = await this.AssistantClient.CreateThreadAsync(metadata: SampleMetadata);
// Respond to user input
try
@@ -64,8 +57,8 @@ await OpenAIAssistantAgent.CreateAsync(
}
finally
{
- await agent.DeleteThreadAsync(threadId);
- await agent.DeleteAsync();
+ await this.AssistantClient.DeleteThreadAsync(threadId);
+ await this.AssistantClient.DeleteAssistantAsync(agent.Id);
}
// Local function to invoke agent and display the conversation messages.
diff --git a/dotnet/samples/GettingStartedWithAgents/Step06_DependencyInjection.cs b/dotnet/samples/GettingStartedWithAgents/Step06_DependencyInjection.cs
index 5beb969bf090..276f2f6fb198 100644
--- a/dotnet/samples/GettingStartedWithAgents/Step06_DependencyInjection.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step06_DependencyInjection.cs
@@ -1,4 +1,5 @@
// Copyright (c) Microsoft. All rights reserved.
+using Azure.Identity;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.SemanticKernel;
@@ -43,7 +44,7 @@ public async Task UseDependencyInjectionToCreateAgentAsync()
serviceContainer.AddAzureOpenAIChatCompletion(
TestConfiguration.AzureOpenAI.ChatDeploymentName,
TestConfiguration.AzureOpenAI.Endpoint,
- TestConfiguration.AzureOpenAI.ApiKey);
+ new AzureCliCredential());
}
// Transient Kernel as each agent may customize its Kernel instance with plug-ins.
diff --git a/dotnet/src/Agents/Abstractions/Agent.cs b/dotnet/src/Agents/Abstractions/Agent.cs
index c501029b8feb..eab2f6532fbf 100644
--- a/dotnet/src/Agents/Abstractions/Agent.cs
+++ b/dotnet/src/Agents/Abstractions/Agent.cs
@@ -38,14 +38,19 @@ public abstract class Agent
public string? Name { get; init; }
///
- /// Gets an for this .
+ /// A for this .
///
- public ILoggerFactory LoggerFactory { get; init; } = NullLoggerFactory.Instance;
+ public ILoggerFactory? LoggerFactory { get; init; }
///
- /// Gets the associated with this .
+ /// The associated with this .
///
- protected ILogger Logger => this._logger ??= this.LoggerFactory.CreateLogger(this.GetType());
+ protected ILogger Logger => this._logger ??= this.ActiveLoggerFactory.CreateLogger(this.GetType());
+
+ ///
+ /// Get the active logger factory, if defined; otherwise, provide the default.
+ ///
+ protected virtual ILoggerFactory ActiveLoggerFactory => this.LoggerFactory ?? NullLoggerFactory.Instance;
///
/// Set of keys to establish channel affinity. Minimum expected key-set:
diff --git a/dotnet/src/Agents/Abstractions/KernelAgent.cs b/dotnet/src/Agents/Abstractions/KernelAgent.cs
index 3ce5e041f346..6a46599a1788 100644
--- a/dotnet/src/Agents/Abstractions/KernelAgent.cs
+++ b/dotnet/src/Agents/Abstractions/KernelAgent.cs
@@ -3,6 +3,7 @@
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
+using Microsoft.Extensions.Logging;
namespace Microsoft.SemanticKernel.Agents;
@@ -40,6 +41,9 @@ public abstract class KernelAgent : Agent
///
public IPromptTemplate? Template { get; protected set; }
+ ///
+ protected override ILoggerFactory ActiveLoggerFactory => this.LoggerFactory ?? this.Kernel.LoggerFactory;
+
///
/// Formats the system instructions for the agent.
///
diff --git a/dotnet/src/Agents/AzureAI/Agents.AzureAI.csproj b/dotnet/src/Agents/AzureAI/Agents.AzureAI.csproj
index fb3f3ef8196b..1103ca422a05 100644
--- a/dotnet/src/Agents/AzureAI/Agents.AzureAI.csproj
+++ b/dotnet/src/Agents/AzureAI/Agents.AzureAI.csproj
@@ -20,7 +20,7 @@
-
+
diff --git a/dotnet/src/Agents/AzureAI/AzureAIAgent.cs b/dotnet/src/Agents/AzureAI/AzureAIAgent.cs
index 7e5069a33e4a..1e58be54ad9f 100644
--- a/dotnet/src/Agents/AzureAI/AzureAIAgent.cs
+++ b/dotnet/src/Agents/AzureAI/AzureAIAgent.cs
@@ -1,7 +1,6 @@
// Copyright (c) Microsoft. All rights reserved.
using System.Collections.Generic;
-using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using Azure.AI.Projects;
@@ -40,10 +39,6 @@ public static class Tools
///
public const string CodeInterpreterMetadataKey = "code";
- private readonly AzureAIClientProvider _provider;
- private readonly AgentsClient _client;
- private readonly string[] _channelKeys;
-
///
/// Gets the assistant definition.
///
@@ -54,6 +49,36 @@ public static class Tools
///
public RunPollingOptions PollingOptions { get; } = new();
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ /// The agent model definition.
+ /// An instance.
+ /// An optional template factory.
+ public AzureAIAgent(
+ Azure.AI.Projects.Agent model,
+ AgentsClient client,
+ IPromptTemplateFactory? templateFactory = null)
+ {
+ this.Client = client;
+ this.Definition = model;
+ this.Description = this.Definition.Description;
+ this.Id = this.Definition.Id;
+ this.Name = this.Definition.Name;
+ this.Instructions = this.Definition.Instructions;
+
+ if (templateFactory != null)
+ {
+ PromptTemplateConfig templateConfig = new(this.Instructions);
+ this.Template = templateFactory.Create(templateConfig);
+ }
+ }
+
+ ///
+ /// Gets the client used to manage agents, threads, and runs for this agent.
+ ///
+ public AgentsClient Client { get; }
+
///
/// Adds a message to the specified thread.
///
@@ -65,7 +90,7 @@ public static class Tools
///
public Task AddChatMessageAsync(string threadId, ChatMessageContent message, CancellationToken cancellationToken = default)
{
- return AgentThreadActions.CreateMessageAsync(this._client, threadId, message, cancellationToken);
+ return AgentThreadActions.CreateMessageAsync(this.Client, threadId, message, cancellationToken);
}
///
@@ -76,7 +101,7 @@ public Task AddChatMessageAsync(string threadId, ChatMessageContent message, Can
/// An asynchronous enumeration of messages.
public IAsyncEnumerable GetThreadMessagesAsync(string threadId, CancellationToken cancellationToken = default)
{
- return AgentThreadActions.GetMessagesAsync(this._client, threadId, cancellationToken);
+ return AgentThreadActions.GetMessagesAsync(this.Client, threadId, cancellationToken);
}
///
@@ -120,8 +145,22 @@ public IAsyncEnumerable InvokeAsync(
{
return ActivityExtensions.RunWithActivityAsync(
() => ModelDiagnostics.StartAgentInvocationActivity(this.Id, this.GetDisplayName(), this.Description),
- () => this.InternalInvokeAsync(threadId, options, arguments, kernel, cancellationToken),
+ () => InternalInvokeAsync(),
cancellationToken);
+
+ async IAsyncEnumerable InternalInvokeAsync()
+ {
+ kernel ??= this.Kernel;
+ arguments = this.MergeArguments(arguments);
+
+ await foreach ((bool isVisible, ChatMessageContent message) in AgentThreadActions.InvokeAsync(this, this.Client, threadId, options, this.Logger, kernel, arguments, cancellationToken).ConfigureAwait(false))
+ {
+ if (isVisible)
+ {
+ yield return message;
+ }
+ }
+ }
}
///
@@ -169,8 +208,16 @@ public IAsyncEnumerable InvokeStreamingAsync(
{
return ActivityExtensions.RunWithActivityAsync(
() => ModelDiagnostics.StartAgentInvocationActivity(this.Id, this.GetDisplayName(), this.Description),
- () => this.InternalInvokeStreamingAsync(threadId, options, arguments, kernel, messages, cancellationToken),
+ () => InternalInvokeStreamingAsync(),
cancellationToken);
+
+ IAsyncEnumerable InternalInvokeStreamingAsync()
+ {
+ kernel ??= this.Kernel;
+ arguments = this.MergeArguments(arguments);
+
+ return AgentThreadActions.InvokeStreamingAsync(this, this.Client, threadId, messages, options, this.Logger, kernel, arguments, cancellationToken);
+ }
}
///
@@ -178,11 +225,8 @@ protected override IEnumerable GetChannelKeys()
{
// Distinguish from other channel types.
yield return typeof(AzureAIChannel).FullName!;
-
- foreach (string key in this._channelKeys)
- {
- yield return key;
- }
+ // Distinguish based on client instance.
+ yield return this.Client.GetHashCode().ToString();
}
///
@@ -190,14 +234,14 @@ protected override async Task CreateChannelAsync(CancellationToken
{
this.Logger.LogAzureAIAgentCreatingChannel(nameof(CreateChannelAsync), nameof(AzureAIChannel));
- string threadId = await AgentThreadActions.CreateThreadAsync(this._client, cancellationToken).ConfigureAwait(false);
+ string threadId = await AgentThreadActions.CreateThreadAsync(this.Client, cancellationToken).ConfigureAwait(false);
this.Logger.LogInformation("[{MethodName}] Created assistant thread: {ThreadId}", nameof(CreateChannelAsync), threadId);
AzureAIChannel channel =
- new(this._client, threadId)
+ new(this.Client, threadId)
{
- Logger = this.LoggerFactory.CreateLogger()
+ Logger = this.ActiveLoggerFactory.CreateLogger()
};
this.Logger.LogAzureAIAgentCreatedChannel(nameof(CreateChannelAsync), nameof(AzureAIChannel), threadId);
@@ -217,76 +261,10 @@ protected override async Task RestoreChannelAsync(string channelSt
this.Logger.LogAzureAIAgentRestoringChannel(nameof(RestoreChannelAsync), nameof(AzureAIChannel), threadId);
- AgentThread thread = await this._client.GetThreadAsync(threadId, cancellationToken).ConfigureAwait(false);
+ AgentThread thread = await this.Client.GetThreadAsync(threadId, cancellationToken).ConfigureAwait(false);
this.Logger.LogAzureAIAgentRestoredChannel(nameof(RestoreChannelAsync), nameof(AzureAIChannel), threadId);
- return new AzureAIChannel(this._client, thread.Id);
- }
-
- ///
- /// Initializes a new instance of the class.
- ///
- /// The agent model definition.
- /// An instance.
- /// An optional template factory.
- public AzureAIAgent(
- Azure.AI.Projects.Agent model,
- AzureAIClientProvider clientProvider,
- IPromptTemplateFactory? templateFactory = null)
- {
- this._provider = clientProvider;
- this._client = clientProvider.Client.GetAgentsClient();
- this._channelKeys = [.. clientProvider.ConfigurationKeys];
-
- this.Definition = model;
- this.Description = this.Definition.Description;
- this.Id = this.Definition.Id;
- this.Name = this.Definition.Name;
- this.Instructions = this.Definition.Instructions;
- this.Kernel = new();
-
- if (templateFactory != null)
- {
- PromptTemplateConfig templateConfig = new(this.Instructions);
- this.Template = templateFactory.Create(templateConfig);
- }
+ return new AzureAIChannel(this.Client, thread.Id);
}
-
- #region private
-
- private async IAsyncEnumerable InternalInvokeAsync(
- string threadId,
- AzureAIInvocationOptions? options,
- KernelArguments? arguments = null,
- Kernel? kernel = null,
- [EnumeratorCancellation] CancellationToken cancellationToken = default)
- {
- kernel ??= this.Kernel;
- arguments = this.MergeArguments(arguments);
-
- await foreach ((bool isVisible, ChatMessageContent message) in AgentThreadActions.InvokeAsync(this, this._client, threadId, options, this.Logger, kernel, arguments, cancellationToken).ConfigureAwait(false))
- {
- if (isVisible)
- {
- yield return message;
- }
- }
- }
-
- private IAsyncEnumerable InternalInvokeStreamingAsync(
- string threadId,
- AzureAIInvocationOptions? options,
- KernelArguments? arguments = null,
- Kernel? kernel = null,
- ChatHistory? messages = null,
- CancellationToken cancellationToken = default)
- {
- kernel ??= this.Kernel;
- arguments = this.MergeArguments(arguments);
-
- return AgentThreadActions.InvokeStreamingAsync(this, this._client, threadId, messages, options, this.Logger, kernel, arguments, cancellationToken);
- }
-
- #endregion
}
diff --git a/dotnet/src/Agents/AzureAI/AzureAIClientProvider.cs b/dotnet/src/Agents/AzureAI/AzureAIClientProvider.cs
index 43b79d86bf2f..9082225ef698 100644
--- a/dotnet/src/Agents/AzureAI/AzureAIClientProvider.cs
+++ b/dotnet/src/Agents/AzureAI/AzureAIClientProvider.cs
@@ -14,11 +14,18 @@ namespace Microsoft.SemanticKernel.Agents.AzureAI;
///
public sealed class AzureAIClientProvider
{
+ private AgentsClient? _agentsClient;
+
///
/// Gets an active client instance.
///
public AIProjectClient Client { get; }
+ ///
+ /// Gets an active assistant client instance.
+ ///
+ public AgentsClient AgentsClient => this._agentsClient ??= this.Client.GetAgentsClient();
+
///
/// Configuration keys required for management.
///
diff --git a/dotnet/src/Agents/Core/ChatHistoryKernelAgent.cs b/dotnet/src/Agents/Core/ChatHistoryKernelAgent.cs
index b20fd001c5a3..1e18ff4b2282 100644
--- a/dotnet/src/Agents/Core/ChatHistoryKernelAgent.cs
+++ b/dotnet/src/Agents/Core/ChatHistoryKernelAgent.cs
@@ -87,7 +87,7 @@ protected sealed override Task CreateChannelAsync(CancellationToke
ChatHistoryChannel channel =
new()
{
- Logger = this.LoggerFactory.CreateLogger()
+ Logger = this.ActiveLoggerFactory.CreateLogger()
};
return Task.FromResult(channel);
diff --git a/dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj b/dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj
index b40116f411d0..c03d06e6e999 100644
--- a/dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj
+++ b/dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj
@@ -20,7 +20,7 @@
-
+
diff --git a/dotnet/src/Agents/OpenAI/Extensions/AssistantClientExtensions.cs b/dotnet/src/Agents/OpenAI/Extensions/AssistantClientExtensions.cs
new file mode 100644
index 000000000000..706186df7e68
--- /dev/null
+++ b/dotnet/src/Agents/OpenAI/Extensions/AssistantClientExtensions.cs
@@ -0,0 +1,172 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System.Collections.Generic;
+using System.Threading;
+using System.Threading.Tasks;
+using Microsoft.SemanticKernel.Agents.OpenAI.Internal;
+using OpenAI.Assistants;
+
+namespace Microsoft.SemanticKernel.Agents.OpenAI;
+
+///
+/// Convenience extensions for .
+///
+public static class AssistantClientExtensions
+{
+ ///
+ /// Creates an assistant asynchronously with the specified options.
+ ///
+ /// The assistant client.
+ /// The model identifier.
+ /// The name of the assistant.
+ /// The description of the assistant.
+ /// The instructions for the assistant.
+ /// Whether to enable the code interpreter tool.
+ /// The file IDs for the code interpreter tool.
+ /// Whether to enable the file search tool.
+ /// The vector store identifier.
+ /// The temperature setting for the assistant.
+ /// The nucleus sampling factor for the assistant.
+ /// The response format for the assistant.
+ /// The metadata for the assistant.
+ /// The cancellation token.
+ /// A task that represents the asynchronous operation. The task result contains the created assistant.
+ public static async Task CreateAssistantAsync(
+ this AssistantClient client,
+ string modelId,
+ string? name = null,
+ string? description = null,
+ string? instructions = null,
+ bool enableCodeInterpreter = false,
+ IReadOnlyList? codeInterpreterFileIds = null,
+ bool enableFileSearch = false,
+ string? vectorStoreId = null,
+ float? temperature = null,
+ float? topP = null,
+ AssistantResponseFormat? responseFormat = null,
+ IReadOnlyDictionary? metadata = null,
+ CancellationToken cancellationToken = default)
+ {
+ AssistantCreationOptions options =
+ new()
+ {
+ Name = name,
+ Description = description,
+ Instructions = instructions,
+ Temperature = temperature,
+ NucleusSamplingFactor = topP,
+ ResponseFormat = responseFormat,
+ };
+
+ if (metadata != null)
+ {
+ foreach (KeyValuePair item in metadata)
+ {
+ options.Metadata[item.Key] = item.Value;
+ }
+ }
+
+ if (enableCodeInterpreter || (codeInterpreterFileIds?.Count ?? 0) > 0)
+ {
+ options.Tools.Add(ToolDefinition.CreateCodeInterpreter());
+ }
+
+ if (enableFileSearch || !string.IsNullOrEmpty(vectorStoreId))
+ {
+ options.Tools.Add(ToolDefinition.CreateFileSearch());
+ }
+
+ options.ToolResources = AssistantToolResourcesFactory.GenerateToolResources(vectorStoreId, codeInterpreterFileIds);
+
+ Assistant assistant = await client.CreateAssistantAsync(modelId, options, cancellationToken).ConfigureAwait(false);
+
+ return assistant;
+ }
+
+ ///
+ /// Creates an assistant from a template asynchronously with the specified options.
+ ///
+ /// The assistant client.
+ /// The model identifier.
+ /// The prompt template configuration.
+ /// Whether to enable the code interpreter tool.
+ /// The file IDs for the code interpreter tool.
+ /// Whether to enable the file search tool.
+ /// The vector store identifier.
+ /// The temperature setting for the assistant.
+ /// The nucleus sampling factor for the assistant.
+ /// The response format for the assistant.
+ /// The metadata for the assistant.
+ /// The cancellation token.
+ /// A task that represents the asynchronous operation. The task result contains the created assistant.
+ public static Task CreateAssistantFromTemplateAsync(
+ this AssistantClient client,
+ string modelId,
+ PromptTemplateConfig config,
+ bool enableCodeInterpreter = false,
+ IReadOnlyList? codeInterpreterFileIds = null,
+ bool enableFileSearch = false,
+ string? vectorStoreId = null,
+ float? temperature = null,
+ float? topP = null,
+ AssistantResponseFormat? responseFormat = null,
+ IReadOnlyDictionary? metadata = null,
+ CancellationToken cancellationToken = default)
+ {
+ return
+ client.CreateAssistantAsync(
+ modelId,
+ config.Name,
+ config.Description,
+ config.Template,
+ enableCodeInterpreter,
+ codeInterpreterFileIds,
+ enableFileSearch,
+ vectorStoreId,
+ temperature,
+ topP,
+ responseFormat,
+ metadata,
+ cancellationToken);
+ }
+
+ ///
+ /// Creates a thread asynchronously with the specified options.
+ ///
+ /// The assistant client.
+ /// The initial messages for the thread.
+ /// The file IDs for the code interpreter tool.
+ /// The vector store identifier.
+ /// The metadata for the thread.
+ /// The cancellation token.
+ /// A task that represents the asynchronous operation. The task result contains the thread ID.
+ public static async Task CreateThreadAsync(
+ this AssistantClient client,
+ IEnumerable? messages = null,
+ IReadOnlyList? codeInterpreterFileIds = null,
+ string? vectorStoreId = null,
+ IReadOnlyDictionary? metadata = null,
+ CancellationToken cancellationToken = default)
+ {
+ ThreadCreationOptions options = new()
+ {
+ ToolResources = AssistantToolResourcesFactory.GenerateToolResources(vectorStoreId, codeInterpreterFileIds)
+ };
+
+ if (messages != null)
+ {
+ options.InitialMessages.AddRange(messages.ToThreadInitializationMessages());
+ }
+
+ if (metadata != null)
+ {
+ foreach (KeyValuePair item in metadata)
+ {
+ options.Metadata[item.Key] = item.Value;
+ }
+ }
+
+ AssistantThread thread = await client.CreateThreadAsync(options, cancellationToken).ConfigureAwait(false);
+
+ return thread.Id;
+ }
+}
diff --git a/dotnet/src/Agents/OpenAI/Extensions/ChatContentMessageExtensions.cs b/dotnet/src/Agents/OpenAI/Extensions/ChatContentMessageExtensions.cs
new file mode 100644
index 000000000000..5cd0055d8456
--- /dev/null
+++ b/dotnet/src/Agents/OpenAI/Extensions/ChatContentMessageExtensions.cs
@@ -0,0 +1,36 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System.Collections.Generic;
+using System.Linq;
+using Microsoft.SemanticKernel.Agents.OpenAI.Internal;
+using OpenAI.Assistants;
+
+namespace Microsoft.SemanticKernel.Agents.OpenAI;
+
+///
+/// Convenience extensions for converting .
+///
+public static class ChatContentMessageExtensions
+{
+ ///
+ /// Converts a instance to a .
+ ///
+ /// The chat message content to convert.
+ /// A instance.
+ public static ThreadInitializationMessage ToThreadInitializationMessage(this ChatMessageContent message)
+ {
+ return
+ new ThreadInitializationMessage(
+ role: message.Role.ToMessageRole(),
+ content: AssistantMessageFactory.GetMessageContents(message));
+ }
+
+ ///
+ /// Converts a collection of instances to a collection of instances.
+ ///
+ /// The collection of chat message contents to convert.
+ /// A collection of instances.
+ public static IEnumerable ToThreadInitializationMessages(this IEnumerable messages)
+ {
+ return messages.Select(message => message.ToThreadInitializationMessage());
+ }
+}
diff --git a/dotnet/src/Agents/OpenAI/Extensions/KernelExtensions.cs b/dotnet/src/Agents/OpenAI/Extensions/KernelExtensions.cs
deleted file mode 100644
index d1e7e0059494..000000000000
--- a/dotnet/src/Agents/OpenAI/Extensions/KernelExtensions.cs
+++ /dev/null
@@ -1,18 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-namespace Microsoft.SemanticKernel.Agents.OpenAI;
-
-internal static class KernelExtensions
-{
- ///
- /// Retrieve a kernel function based on the tool name.
- ///
- public static KernelFunction GetKernelFunction(this Kernel kernel, string functionName, char delimiter)
- {
- string[] nameParts = functionName.Split(delimiter);
- return nameParts.Length switch
- {
- 2 => kernel.Plugins.GetFunction(nameParts[0], nameParts[1]),
- _ => throw new KernelException($"Agent Failure - Unknown tool: {functionName}"),
- };
- }
-}
diff --git a/dotnet/src/Agents/OpenAI/Extensions/KernelFunctionExtensions.cs b/dotnet/src/Agents/OpenAI/Extensions/KernelFunctionExtensions.cs
index a3ccf53d33be..d15dec19d6e0 100644
--- a/dotnet/src/Agents/OpenAI/Extensions/KernelFunctionExtensions.cs
+++ b/dotnet/src/Agents/OpenAI/Extensions/KernelFunctionExtensions.cs
@@ -15,20 +15,20 @@ public static class KernelFunctionExtensions
/// The source function
/// The plugin name
/// An OpenAI tool definition
- public static FunctionToolDefinition ToToolDefinition(this KernelFunction function, string pluginName)
+ public static FunctionToolDefinition ToToolDefinition(this KernelFunction function, string? pluginName = null)
{
if (function.Metadata.Parameters.Count > 0)
{
BinaryData parameterData = function.Metadata.CreateParameterSpec();
- return new FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName))
+ return new FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName ?? function.PluginName))
{
Description = function.Description,
Parameters = parameterData,
};
}
- return new FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName))
+ return new FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName ?? function.PluginName))
{
Description = function.Description
};
diff --git a/dotnet/src/Agents/OpenAI/Extensions/OpenAIClientExtensions.cs b/dotnet/src/Agents/OpenAI/Extensions/OpenAIClientExtensions.cs
new file mode 100644
index 000000000000..7e63a32673f2
--- /dev/null
+++ b/dotnet/src/Agents/OpenAI/Extensions/OpenAIClientExtensions.cs
@@ -0,0 +1,110 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System.Collections.Generic;
+using System.IO;
+using System.Threading;
+using System.Threading.Tasks;
+using OpenAI;
+using OpenAI.Assistants;
+using OpenAI.Files;
+using OpenAI.VectorStores;
+
+namespace Microsoft.SemanticKernel.Agents.OpenAI;
+
+///
+/// Convenience extensions for <see cref="OpenAIClient"/>.
+///
+public static class OpenAIClientExtensions
+{
+ ///
+ /// Creates a vector store asynchronously.
+ ///
+ /// The OpenAI client instance.
+ /// The collection of file identifiers to include in the vector store.
+ /// Indicates whether to wait until the operation is completed.
+ /// The name of the vector store.
+ /// The expiration policy for the vector store.
+ /// The chunking strategy for the vector store.
+ /// The metadata associated with the vector store.
+ /// The cancellation token to monitor for cancellation requests.
+ /// The identifier of the created vector store.
+ public static async Task CreateVectorStoreAsync(
+ this OpenAIClient client,
+ IEnumerable fileIds,
+ bool waitUntilCompleted = true,
+ string? storeName = null,
+ VectorStoreExpirationPolicy? expirationPolicy = null,
+ FileChunkingStrategy? chunkingStrategy = null,
+ IReadOnlyDictionary? metadata = null,
+ CancellationToken cancellationToken = default)
+ {
+ VectorStoreCreationOptions options = new()
+ {
+ Name = storeName,
+ ChunkingStrategy = chunkingStrategy,
+ ExpirationPolicy = expirationPolicy,
+ };
+
+ options.FileIds.AddRange(fileIds);
+
+ if (metadata != null)
+ {
+ foreach (KeyValuePair item in metadata)
+ {
+ options.Metadata[item.Key] = item.Value;
+ }
+ }
+
+ VectorStoreClient vectorStoreClient = client.GetVectorStoreClient();
+ CreateVectorStoreOperation result = await vectorStoreClient.CreateVectorStoreAsync(waitUntilCompleted, options, cancellationToken).ConfigureAwait(false);
+
+ return result.VectorStoreId;
+ }
+
+ ///
+ /// Deletes a vector store asynchronously.
+ ///
+ /// The OpenAI client instance.
+ /// The identifier of the vector store to delete.
+ /// The cancellation token to monitor for cancellation requests.
+ /// A boolean indicating whether the vector store was successfully deleted.
+ public static async Task DeleteVectorStoreAsync(this OpenAIClient client, string vectorStoreId, CancellationToken cancellationToken = default)
+ {
+ VectorStoreClient vectorStoreClient = client.GetVectorStoreClient();
+ VectorStoreDeletionResult result = await vectorStoreClient.DeleteVectorStoreAsync(vectorStoreId, cancellationToken).ConfigureAwait(false);
+ return result.Deleted;
+ }
+
+ ///
+ /// Uploads a file to use with the assistant.
+ ///
+ /// The OpenAI client instance.
+ /// The content to upload.
+ /// The name of the file.
+ /// The <see cref="CancellationToken"/> to monitor for cancellation requests. The default is <see cref="CancellationToken.None"/>.
+ /// The file identifier.
+ ///
+ /// Use the <see cref="OpenAIFileClient"/> directly for more advanced file operations.
+ ///
+ public static async Task UploadAssistantFileAsync(this OpenAIClient client, Stream stream, string name, CancellationToken cancellationToken = default)
+ {
+ OpenAIFileClient fileClient = client.GetOpenAIFileClient();
+
+ OpenAIFile fileInfo = await fileClient.UploadFileAsync(stream, name, FileUploadPurpose.Assistants, cancellationToken).ConfigureAwait(false);
+
+ return fileInfo.Id;
+ }
+
+ ///
+ /// Deletes a file asynchronously.
+ ///
+ /// The OpenAI client instance.
+ /// The identifier of the file to delete.
+ /// The cancellation token to monitor for cancellation requests.
+ /// A boolean indicating whether the file was successfully deleted.
+ public static async Task DeleteFileAsync(this OpenAIClient client, string fileId, CancellationToken cancellationToken = default)
+ {
+ OpenAIFileClient fileClient = client.GetOpenAIFileClient();
+ FileDeletionResult result = await fileClient.DeleteFileAsync(fileId, cancellationToken).ConfigureAwait(false);
+ return result.Deleted;
+ }
+}
diff --git a/dotnet/src/Agents/OpenAI/Internal/AssistantCreationOptionsFactory.cs b/dotnet/src/Agents/OpenAI/Internal/AssistantCreationOptionsFactory.cs
index 532a8433c37c..cdcfdadf93ef 100644
--- a/dotnet/src/Agents/OpenAI/Internal/AssistantCreationOptionsFactory.cs
+++ b/dotnet/src/Agents/OpenAI/Internal/AssistantCreationOptionsFactory.cs
@@ -5,6 +5,8 @@
namespace Microsoft.SemanticKernel.Agents.OpenAI.Internal;
+#pragma warning disable CS0618 // Type or member is obsolete
+
///
/// Produce the for an assistant according to the requested configuration.
///
diff --git a/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs b/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs
index cda0399f5e28..772c30630fe5 100644
--- a/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs
+++ b/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs
@@ -1,6 +1,6 @@
// Copyright (c) Microsoft. All rights reserved.
+
using System.Collections.Generic;
-using Microsoft.SemanticKernel.ChatCompletion;
using OpenAI.Assistants;
namespace Microsoft.SemanticKernel.Agents.OpenAI.Internal;
@@ -8,62 +8,46 @@ namespace Microsoft.SemanticKernel.Agents.OpenAI.Internal;
///
/// Factory for creating definition.
///
-///
-/// Improves testability.
-///
internal static class AssistantRunOptionsFactory
{
- ///
- /// Produce by reconciling and .
- ///
- /// The assistant definition
- /// Instructions to use for the run
- /// The run specific options
- public static RunCreationOptions GenerateOptions(OpenAIAssistantDefinition definition, string? overrideInstructions, OpenAIAssistantInvocationOptions? invocationOptions)
+ public static RunCreationOptions GenerateOptions(RunCreationOptions? defaultOptions, string? overrideInstructions, RunCreationOptions? invocationOptions)
{
- int? truncationMessageCount = ResolveExecutionSetting(invocationOptions?.TruncationMessageCount, definition.ExecutionOptions?.TruncationMessageCount);
-
- RunCreationOptions options =
+ RunCreationOptions runOptions =
new()
{
- AdditionalInstructions = invocationOptions?.AdditionalInstructions ?? definition.ExecutionOptions?.AdditionalInstructions,
+ AdditionalInstructions = invocationOptions?.AdditionalInstructions ?? defaultOptions?.AdditionalInstructions,
InstructionsOverride = overrideInstructions,
- MaxOutputTokenCount = ResolveExecutionSetting(invocationOptions?.MaxCompletionTokens, definition.ExecutionOptions?.MaxCompletionTokens),
- MaxInputTokenCount = ResolveExecutionSetting(invocationOptions?.MaxPromptTokens, definition.ExecutionOptions?.MaxPromptTokens),
- ModelOverride = invocationOptions?.ModelName,
- NucleusSamplingFactor = ResolveExecutionSetting(invocationOptions?.TopP, definition.TopP),
- AllowParallelToolCalls = ResolveExecutionSetting(invocationOptions?.ParallelToolCallsEnabled, definition.ExecutionOptions?.ParallelToolCallsEnabled),
- ResponseFormat = ResolveExecutionSetting(invocationOptions?.EnableJsonResponse, definition.EnableJsonResponse) ?? false ? AssistantResponseFormat.JsonObject : null,
- Temperature = ResolveExecutionSetting(invocationOptions?.Temperature, definition.Temperature),
- TruncationStrategy = truncationMessageCount.HasValue ? RunTruncationStrategy.CreateLastMessagesStrategy(truncationMessageCount.Value) : null,
+ MaxOutputTokenCount = invocationOptions?.MaxOutputTokenCount ?? defaultOptions?.MaxOutputTokenCount,
+ MaxInputTokenCount = invocationOptions?.MaxInputTokenCount ?? defaultOptions?.MaxInputTokenCount,
+ ModelOverride = invocationOptions?.ModelOverride ?? defaultOptions?.ModelOverride,
+ NucleusSamplingFactor = invocationOptions?.NucleusSamplingFactor ?? defaultOptions?.NucleusSamplingFactor,
+ AllowParallelToolCalls = invocationOptions?.AllowParallelToolCalls ?? defaultOptions?.AllowParallelToolCalls,
+ ResponseFormat = invocationOptions?.ResponseFormat ?? defaultOptions?.ResponseFormat,
+ Temperature = invocationOptions?.Temperature ?? defaultOptions?.Temperature,
+ ToolConstraint = invocationOptions?.ToolConstraint ?? defaultOptions?.ToolConstraint,
+ TruncationStrategy = invocationOptions?.TruncationStrategy ?? defaultOptions?.TruncationStrategy,
};
- if (invocationOptions?.Metadata != null)
+ IList? additionalMessages = invocationOptions?.AdditionalMessages ?? defaultOptions?.AdditionalMessages;
+ if (additionalMessages != null)
{
- foreach (var metadata in invocationOptions.Metadata)
- {
- options.Metadata.Add(metadata.Key, metadata.Value ?? string.Empty);
- }
+ runOptions.AdditionalMessages.AddRange(additionalMessages);
}
- if (invocationOptions?.AdditionalMessages != null)
+ PopulateMetadata(defaultOptions, runOptions);
+ PopulateMetadata(invocationOptions, runOptions);
+
+ return runOptions;
+ }
+
+ private static void PopulateMetadata(RunCreationOptions? sourceOptions, RunCreationOptions targetOptions)
+ {
+ if (sourceOptions?.Metadata != null)
{
- foreach (ChatMessageContent message in invocationOptions.AdditionalMessages)
+ foreach (KeyValuePair item in sourceOptions.Metadata)
{
- ThreadInitializationMessage threadMessage = new(
- role: message.Role == AuthorRole.User ? MessageRole.User : MessageRole.Assistant,
- content: AssistantMessageFactory.GetMessageContents(message));
-
- options.AdditionalMessages.Add(threadMessage);
+ targetOptions.Metadata[item.Key] = item.Value ?? string.Empty;
}
}
-
- return options;
}
-
- private static TValue? ResolveExecutionSetting(TValue? setting, TValue? agentSetting) where TValue : struct
- =>
- setting.HasValue && (!agentSetting.HasValue || !EqualityComparer.Default.Equals(setting.Value, agentSetting.Value)) ?
- setting.Value :
- agentSetting;
}
diff --git a/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs b/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs
index b8a0a3778745..64749cedff69 100644
--- a/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs
+++ b/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs
@@ -2,6 +2,7 @@
using System;
using System.ClientModel;
using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
using System.Linq;
using System.Net;
using System.Runtime.CompilerServices;
@@ -29,46 +30,6 @@ internal static class AssistantThreadActions
RunStatus.Cancelling,
];
- ///
- /// Create a new assistant thread.
- ///
- /// The assistant client
- /// The options for creating the thread
- /// The to monitor for cancellation requests. The default is .
- /// The thread identifier
- public static async Task CreateThreadAsync(AssistantClient client, OpenAIThreadCreationOptions? options, CancellationToken cancellationToken = default)
- {
- ThreadCreationOptions createOptions =
- new()
- {
- ToolResources = AssistantToolResourcesFactory.GenerateToolResources(options?.VectorStoreId, options?.CodeInterpreterFileIds),
- };
-
- if (options?.Messages is not null)
- {
- foreach (ChatMessageContent message in options.Messages)
- {
- ThreadInitializationMessage threadMessage = new(
- role: message.Role == AuthorRole.User ? MessageRole.User : MessageRole.Assistant,
- content: AssistantMessageFactory.GetMessageContents(message));
-
- createOptions.InitialMessages.Add(threadMessage);
- }
- }
-
- if (options?.Metadata != null)
- {
- foreach (KeyValuePair item in options.Metadata)
- {
- createOptions.Metadata[item.Key] = item.Value;
- }
- }
-
- AssistantThread thread = await client.CreateThreadAsync(createOptions, cancellationToken).ConfigureAwait(false);
-
- return thread.Id;
- }
-
///
/// Create a message in the specified thread.
///
@@ -152,20 +113,15 @@ public static async IAsyncEnumerable GetMessagesAsync(Assist
OpenAIAssistantAgent agent,
AssistantClient client,
string threadId,
- OpenAIAssistantInvocationOptions? invocationOptions,
+ RunCreationOptions? invocationOptions,
ILogger logger,
Kernel kernel,
KernelArguments? arguments,
[EnumeratorCancellation] CancellationToken cancellationToken)
{
- if (agent.IsDeleted)
- {
- throw new KernelException($"Agent Failure - {nameof(OpenAIAssistantAgent)} agent is deleted: {agent.Id}.");
- }
-
logger.LogOpenAIAssistantCreatingRun(nameof(InvokeAsync), threadId);
- List tools = new(agent.Tools);
+ List tools = new(agent.Definition.Tools);
// Add unique functions from the Kernel which are not already present in the agent's tools
var functionToolNames = new HashSet(tools.OfType().Select(t => t.FunctionName));
@@ -176,7 +132,7 @@ public static async IAsyncEnumerable GetMessagesAsync(Assist
string? instructions = await agent.GetInstructionsAsync(kernel, arguments, cancellationToken).ConfigureAwait(false);
- RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(agent.Definition, instructions, invocationOptions);
+ RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(agent.RunOptions, instructions, invocationOptions);
options.ToolsOverride.AddRange(tools);
@@ -386,29 +342,25 @@ async Task PollRunStatusAsync()
///
/// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility.
///
+ [ExcludeFromCodeCoverage]
public static async IAsyncEnumerable InvokeStreamingAsync(
OpenAIAssistantAgent agent,
AssistantClient client,
string threadId,
IList? messages,
- OpenAIAssistantInvocationOptions? invocationOptions,
+ RunCreationOptions? invocationOptions,
ILogger logger,
Kernel kernel,
KernelArguments? arguments,
[EnumeratorCancellation] CancellationToken cancellationToken)
{
- if (agent.IsDeleted)
- {
- throw new KernelException($"Agent Failure - {nameof(OpenAIAssistantAgent)} agent is deleted: {agent.Id}.");
- }
-
logger.LogOpenAIAssistantCreatingRun(nameof(InvokeAsync), threadId);
- ToolDefinition[]? tools = [.. agent.Tools, .. kernel.Plugins.SelectMany(p => p.Select(f => f.ToToolDefinition(p.Name)))];
+ ToolDefinition[]? tools = [.. agent.Definition.Tools, .. kernel.Plugins.SelectMany(p => p.Select(f => f.ToToolDefinition(p.Name)))];
string? instructions = await agent.GetInstructionsAsync(kernel, arguments, cancellationToken).ConfigureAwait(false);
- RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(agent.Definition, instructions, invocationOptions);
+ RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(agent.RunOptions, instructions, invocationOptions);
options.ToolsOverride.AddRange(tools);
@@ -636,6 +588,7 @@ private static ChatMessageContent GenerateMessageContent(string? assistantName,
return content;
}
+ [ExcludeFromCodeCoverage]
private static StreamingChatMessageContent GenerateStreamingMessageContent(string? assistantName, MessageContentUpdate update)
{
StreamingChatMessageContent content =
@@ -668,6 +621,7 @@ private static StreamingChatMessageContent GenerateStreamingMessageContent(strin
return content;
}
+ [ExcludeFromCodeCoverage]
private static StreamingChatMessageContent? GenerateStreamingCodeInterpreterContent(string? assistantName, RunStepDetailsUpdate update)
{
StreamingChatMessageContent content =
@@ -720,6 +674,7 @@ private static AnnotationContent GenerateAnnotationContent(TextAnnotation annota
};
}
+ [ExcludeFromCodeCoverage]
private static StreamingAnnotationContent GenerateStreamingAnnotationContent(TextAnnotationUpdate annotation)
{
string? fileId = null;
diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs
index 95144b281c14..20ea1768a7e4 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs
@@ -1,7 +1,7 @@
// Copyright (c) Microsoft. All rights reserved.
+using System;
using System.Collections.Generic;
-using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Text.Json;
@@ -13,7 +13,6 @@
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Diagnostics;
using OpenAI.Assistants;
-using OpenAI.Files;
namespace Microsoft.SemanticKernel.Agents.OpenAI;
@@ -30,15 +29,50 @@ public sealed class OpenAIAssistantAgent : KernelAgent
internal const string OptionsMetadataKey = "__run_options";
internal const string TemplateMetadataKey = "__template_format";
- private readonly OpenAIClientProvider _provider;
- private readonly Assistant _assistant;
- private readonly AssistantClient _client;
- private readonly string[] _channelKeys;
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ /// The assistant definition.
+ /// The OpenAI provider for accessing the Assistant API service.
+ /// Optional collection of plugins to add to the kernel.
+ /// The prompt template configuration.
+ /// An optional factory to produce the for the agent.
+ public OpenAIAssistantAgent(
+ Assistant definition,
+ AssistantClient client,
+ IEnumerable? plugins = null,
+ PromptTemplateConfig? templateConfig = null,
+ IPromptTemplateFactory? templateFactory = null)
+ {
+ this.Client = client;
+
+ this.Definition = definition;
+
+ this.Description = this.Definition.Description;
+ this.Id = this.Definition.Id;
+ this.Name = this.Definition.Name;
+ this.Instructions = templateConfig?.Template ?? this.Definition.Instructions;
+
+ if (templateConfig != null)
+ {
+ this.Template = templateFactory?.Create(templateConfig);
+ }
+
+ if (plugins != null)
+ {
+ this.Kernel.Plugins.AddRange(plugins);
+ }
+ }
+
+ ///
+ /// Expose client for additional use.
+ ///
+ public AssistantClient Client { get; }
///
/// Gets the assistant definition.
///
- public OpenAIAssistantDefinition Definition { get; private init; }
+ public Assistant Definition { get; }
///
/// Gets a value that indicates whether the assistant has been deleted via .
@@ -46,6 +80,7 @@ public sealed class OpenAIAssistantAgent : KernelAgent
///
/// An assistant removed by other means will result in an exception when invoked.
///
+ [Obsolete("Use the OpenAI.Assistants.AssistantClient to manage the Assistant definition lifecycle.")]
public bool IsDeleted { get; private set; }
///
@@ -54,9 +89,9 @@ public sealed class OpenAIAssistantAgent : KernelAgent
public RunPollingOptions PollingOptions { get; } = new();
///
- /// Gets the predefined tools for run processing.
+ /// Gets or sets the run creation options for the assistant.
///
- internal IReadOnlyList Tools => this._assistant.Tools;
+ public RunCreationOptions? RunOptions { get; init; }
///
/// Create a new .
@@ -69,7 +104,8 @@ public sealed class OpenAIAssistantAgent : KernelAgent
/// An optional factory to produce the for the agent.
/// The to monitor for cancellation requests. The default is .
/// An instance.
- public async static Task CreateFromTemplateAsync(
+ [Obsolete("Use the OpenAI.Assistants.AssistantClient to create an assistant (CreateAssistantFromTemplateAsync).")]
+ public static async Task CreateFromTemplateAsync(
OpenAIClientProvider clientProvider,
OpenAIAssistantCapabilities capabilities,
Kernel kernel,
@@ -89,7 +125,7 @@ public async static Task CreateFromTemplateAsync(
IPromptTemplate? template = templateFactory?.Create(templateConfig);
// Create the client
- AssistantClient client = CreateClient(clientProvider);
+ AssistantClient client = clientProvider.Client.GetAssistantClient();
// Create the assistant
AssistantCreationOptions assistantCreationOptions = templateConfig.CreateAssistantOptions(capabilities);
@@ -97,7 +133,7 @@ public async static Task CreateFromTemplateAsync(
// Instantiate the agent
return
- new OpenAIAssistantAgent(model, clientProvider, client)
+ new OpenAIAssistantAgent(model, clientProvider.AssistantClient)
{
Kernel = kernel,
Arguments = defaultArguments,
@@ -114,6 +150,7 @@ public async static Task CreateFromTemplateAsync(
/// Optional default arguments, including any .
/// The to monitor for cancellation requests. The default is .
/// An instance.
+ [Obsolete("Use the OpenAI.Assistants.AssistantClient to create an assistant (CreateAssistantAsync).")]
public static async Task CreateAsync(
OpenAIClientProvider clientProvider,
OpenAIAssistantDefinition definition,
@@ -127,7 +164,7 @@ public static async Task CreateAsync(
Verify.NotNull(definition, nameof(definition));
// Create the client
- AssistantClient client = CreateClient(clientProvider);
+ AssistantClient client = clientProvider.Client.GetAssistantClient();
// Create the assistant
AssistantCreationOptions assistantCreationOptions = definition.CreateAssistantOptions();
@@ -135,45 +172,7 @@ public static async Task CreateAsync(
// Instantiate the agent
return
- new OpenAIAssistantAgent(model, clientProvider, client)
- {
- Kernel = kernel,
- Arguments = defaultArguments ?? [],
- };
- }
-
- ///
- /// Create a new .
- ///
- /// OpenAI client provider for accessing the API service.
- /// OpenAI model id.
- /// The assistant creation options.
- /// The containing services, plugins, and other state for use throughout the operation.
- /// Optional default arguments, including any .
- /// The to monitor for cancellation requests. The default is .
- /// An instance
- public static async Task CreateAsync(
- OpenAIClientProvider clientProvider,
- string modelId,
- AssistantCreationOptions creationOptions,
- Kernel kernel,
- KernelArguments? defaultArguments = null,
- CancellationToken cancellationToken = default)
- {
- // Validate input
- Verify.NotNull(kernel, nameof(kernel));
- Verify.NotNull(clientProvider, nameof(clientProvider));
- Verify.NotNull(creationOptions, nameof(creationOptions));
-
- // Create the client
- AssistantClient client = CreateClient(clientProvider);
-
- // Create the assistant
- Assistant model = await client.CreateAssistantAsync(modelId, creationOptions, cancellationToken).ConfigureAwait(false);
-
- // Instantiate the agent
- return
- new OpenAIAssistantAgent(model, clientProvider, client)
+ new OpenAIAssistantAgent(model, clientProvider.AssistantClient)
{
Kernel = kernel,
Arguments = defaultArguments ?? [],
@@ -183,15 +182,16 @@ public static async Task CreateAsync(
///
/// Retrieves a list of assistant definitions.
///
- /// The configuration for accessing the API service.
+ /// The configuration for accessing the API service.
/// The to monitor for cancellation requests. The default is .
/// A list of objects.
+ [Obsolete("Use the OpenAI.Assistants.AssistantClient to query for assistant definitions (GetAssistantsAsync).")]
public static async IAsyncEnumerable ListDefinitionsAsync(
- OpenAIClientProvider provider,
+ OpenAIClientProvider clientProvider,
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
// Create the client
- AssistantClient client = CreateClient(provider);
+ AssistantClient client = clientProvider.Client.GetAssistantClient();
// Query and enumerate assistant definitions
await foreach (Assistant model in client.GetAssistantsAsync(new AssistantCollectionOptions() { Order = AssistantCollectionOrder.Descending }, cancellationToken).ConfigureAwait(false))
@@ -210,6 +210,7 @@ public static async IAsyncEnumerable ListDefinitionsA
/// An optional factory to produce the for the agent.
/// The to monitor for cancellation requests. The default is .
/// An instance.
+ [Obsolete("Use the OpenAI.Assistants.AssistantClient to retrieve an assistant definition (GetAssistantsAsync).")]
public static async Task RetrieveAsync(
OpenAIClientProvider clientProvider,
string id,
@@ -224,7 +225,7 @@ public static async Task RetrieveAsync(
Verify.NotNullOrWhiteSpace(id, nameof(id));
// Create the client
- AssistantClient client = CreateClient(clientProvider);
+ AssistantClient client = clientProvider.Client.GetAssistantClient();
// Retrieve the assistant
Assistant model = await client.GetAssistantAsync(id, cancellationToken).ConfigureAwait(false);
@@ -237,7 +238,7 @@ public static async Task RetrieveAsync(
// Instantiate the agent
return
- new OpenAIAssistantAgent(model, clientProvider, client)
+ new OpenAIAssistantAgent(model, clientProvider.AssistantClient)
{
Kernel = kernel,
Arguments = defaultArguments ?? [],
@@ -250,8 +251,9 @@ public static async Task RetrieveAsync(
///
/// The to monitor for cancellation requests. The default is .
/// The thread identifier.
+ [Obsolete("Use the OpenAI.Assistants.AssistantClient to create a thread.")]
public Task CreateThreadAsync(CancellationToken cancellationToken = default)
- => AssistantThreadActions.CreateThreadAsync(this._client, options: null, cancellationToken);
+ => this.CreateThreadAsync(options: null, cancellationToken);
///
/// Creates a new assistant thread.
@@ -259,8 +261,14 @@ public Task CreateThreadAsync(CancellationToken cancellationToken = defa
/// The options for creating the thread.
/// The to monitor for cancellation requests. The default is .
/// The thread identifier.
+ [Obsolete("Use the OpenAI.Assistants.AssistantClient to create a thread.")]
public Task CreateThreadAsync(OpenAIThreadCreationOptions? options, CancellationToken cancellationToken = default)
- => AssistantThreadActions.CreateThreadAsync(this._client, options, cancellationToken);
+ => this.Client.CreateThreadAsync(
+ options?.Messages,
+ options?.CodeInterpreterFileIds,
+ options?.VectorStoreId,
+ options?.Metadata,
+ cancellationToken);
///
/// Creates a new assistant thread.
@@ -268,6 +276,7 @@ public Task CreateThreadAsync(OpenAIThreadCreationOptions? options, Canc
/// The thread identifier.
/// The to monitor for cancellation requests. The default is .
/// The thread identifier.
+ [Obsolete("Use the OpenAI.Assistants.AssistantClient to delete an existing thread.")]
public async Task DeleteThreadAsync(
string threadId,
CancellationToken cancellationToken = default)
@@ -275,30 +284,11 @@ public async Task DeleteThreadAsync(
// Validate input
Verify.NotNullOrWhiteSpace(threadId, nameof(threadId));
- ThreadDeletionResult result = await this._client.DeleteThreadAsync(threadId, cancellationToken).ConfigureAwait(false);
+ ThreadDeletionResult result = await this.Client.DeleteThreadAsync(threadId, cancellationToken).ConfigureAwait(false);
return result.Deleted;
}
- ///
- /// Uploads a file to use with the assistant.
- ///
- /// The content to upload.
- /// The name of the file.
- /// The to monitor for cancellation requests. The default is .
- /// The file identifier.
- ///
- /// Use the directly for more advanced file operations.
- ///
- public async Task UploadFileAsync(Stream stream, string name, CancellationToken cancellationToken = default)
- {
- OpenAIFileClient client = this._provider.Client.GetOpenAIFileClient();
-
- OpenAIFile fileInfo = await client.UploadFileAsync(stream, name, FileUploadPurpose.Assistants, cancellationToken).ConfigureAwait(false);
-
- return fileInfo.Id;
- }
-
///
/// Adds a message to the specified thread.
///
@@ -310,9 +300,7 @@ public async Task UploadFileAsync(Stream stream, string name, Cancellati
///
public Task AddChatMessageAsync(string threadId, ChatMessageContent message, CancellationToken cancellationToken = default)
{
- this.ThrowIfDeleted();
-
- return AssistantThreadActions.CreateMessageAsync(this._client, threadId, message, cancellationToken);
+ return AssistantThreadActions.CreateMessageAsync(this.Client, threadId, message, cancellationToken);
}
///
@@ -323,9 +311,7 @@ public Task AddChatMessageAsync(string threadId, ChatMessageContent message, Can
/// An asynchronous enumeration of messages.
public IAsyncEnumerable GetThreadMessagesAsync(string threadId, CancellationToken cancellationToken = default)
{
- this.ThrowIfDeleted();
-
- return AssistantThreadActions.GetMessagesAsync(this._client, threadId, cancellationToken);
+ return AssistantThreadActions.GetMessagesAsync(this.Client, threadId, cancellationToken);
}
///
@@ -336,11 +322,12 @@ public IAsyncEnumerable GetThreadMessagesAsync(string thread
///
/// An assistant-based agent is not useable after deletion.
///
+ [Obsolete("Use the OpenAI.Assistants.AssistantClient to remove or otherwise modify the Assistant definition.")]
public async Task DeleteAsync(CancellationToken cancellationToken = default)
{
if (!this.IsDeleted)
{
- AssistantDeletionResult result = await this._client.DeleteAssistantAsync(this.Id, cancellationToken).ConfigureAwait(false);
+ AssistantDeletionResult result = await this.Client.DeleteAssistantAsync(this.Id, cancellationToken).ConfigureAwait(false);
this.IsDeleted = result.Deleted;
}
@@ -379,15 +366,29 @@ public IAsyncEnumerable InvokeAsync(
///
public IAsyncEnumerable InvokeAsync(
string threadId,
- OpenAIAssistantInvocationOptions? options,
+ RunCreationOptions? options,
KernelArguments? arguments = null,
Kernel? kernel = null,
CancellationToken cancellationToken = default)
{
return ActivityExtensions.RunWithActivityAsync(
() => ModelDiagnostics.StartAgentInvocationActivity(this.Id, this.GetDisplayName(), this.Description),
- () => this.InternalInvokeAsync(threadId, options, arguments, kernel, cancellationToken),
+ () => InternalInvokeAsync(),
cancellationToken);
+
+ async IAsyncEnumerable InternalInvokeAsync()
+ {
+ kernel ??= this.Kernel;
+ arguments = this.MergeArguments(arguments);
+
+ await foreach ((bool isVisible, ChatMessageContent message) in AssistantThreadActions.InvokeAsync(this, this.Client, threadId, options, this.Logger, kernel, arguments, cancellationToken).ConfigureAwait(false))
+ {
+ if (isVisible)
+ {
+ yield return message;
+ }
+ }
+ }
}
///
@@ -425,7 +426,7 @@ public IAsyncEnumerable InvokeStreamingAsync(
///
public IAsyncEnumerable InvokeStreamingAsync(
string threadId,
- OpenAIAssistantInvocationOptions? options,
+ RunCreationOptions? options,
KernelArguments? arguments = null,
Kernel? kernel = null,
ChatHistory? messages = null,
@@ -433,8 +434,16 @@ public IAsyncEnumerable InvokeStreamingAsync(
{
return ActivityExtensions.RunWithActivityAsync(
() => ModelDiagnostics.StartAgentInvocationActivity(this.Id, this.GetDisplayName(), this.Description),
- () => this.InternalInvokeStreamingAsync(threadId, options, arguments, kernel, messages, cancellationToken),
+ () => InternalInvokeStreamingAsync(),
cancellationToken);
+
+ IAsyncEnumerable InternalInvokeStreamingAsync()
+ {
+ kernel ??= this.Kernel;
+ arguments = this.MergeArguments(arguments);
+
+ return AssistantThreadActions.InvokeStreamingAsync(this, this.Client, threadId, messages, options, this.Logger, kernel, arguments, cancellationToken);
+ }
}
///
@@ -442,11 +451,8 @@ protected override IEnumerable GetChannelKeys()
{
// Distinguish from other channel types.
yield return typeof(OpenAIAssistantChannel).FullName!;
-
- foreach (string key in this._channelKeys)
- {
- yield return key;
- }
+ // Distinguish based on client instance.
+ yield return this.Client.GetHashCode().ToString();
}
///
@@ -454,14 +460,14 @@ protected override async Task CreateChannelAsync(CancellationToken
{
this.Logger.LogOpenAIAssistantAgentCreatingChannel(nameof(CreateChannelAsync), nameof(OpenAIAssistantChannel));
- AssistantThread thread = await this._client.CreateThreadAsync(options: null, cancellationToken).ConfigureAwait(false);
+ AssistantThread thread = await this.Client.CreateThreadAsync(options: null, cancellationToken).ConfigureAwait(false);
this.Logger.LogInformation("[{MethodName}] Created assistant thread: {ThreadId}", nameof(CreateChannelAsync), thread.Id);
OpenAIAssistantChannel channel =
- new(this._client, thread.Id)
+ new(this.Client, thread.Id)
{
- Logger = this.LoggerFactory.CreateLogger()
+ Logger = this.ActiveLoggerFactory.CreateLogger()
};
this.Logger.LogOpenAIAssistantAgentCreatedChannel(nameof(CreateChannelAsync), nameof(OpenAIAssistantChannel), thread.Id);
@@ -469,14 +475,6 @@ protected override async Task CreateChannelAsync(CancellationToken
return channel;
}
- internal void ThrowIfDeleted()
- {
- if (this.IsDeleted)
- {
- throw new KernelException($"Agent Failure - {nameof(OpenAIAssistantAgent)} agent is deleted: {this.Id}.");
- }
- }
-
internal Task GetInstructionsAsync(Kernel kernel, KernelArguments? arguments, CancellationToken cancellationToken) =>
this.FormatInstructionsAsync(kernel, arguments, cancellationToken);
@@ -487,36 +485,14 @@ protected override async Task RestoreChannelAsync(string channelSt
this.Logger.LogOpenAIAssistantAgentRestoringChannel(nameof(RestoreChannelAsync), nameof(OpenAIAssistantChannel), threadId);
- AssistantThread thread = await this._client.GetThreadAsync(threadId, cancellationToken).ConfigureAwait(false);
+ AssistantThread thread = await this.Client.GetThreadAsync(threadId, cancellationToken).ConfigureAwait(false);
this.Logger.LogOpenAIAssistantAgentRestoredChannel(nameof(RestoreChannelAsync), nameof(OpenAIAssistantChannel), threadId);
- return new OpenAIAssistantChannel(this._client, thread.Id);
- }
-
- #region private
-
- ///
- /// Initializes a new instance of the class.
- ///
- private OpenAIAssistantAgent(
- Assistant model,
- OpenAIClientProvider provider,
- AssistantClient client)
- {
- this._provider = provider;
- this._assistant = model;
- this._client = provider.Client.GetAssistantClient();
- this._channelKeys = provider.ConfigurationKeys.ToArray();
-
- this.Definition = CreateAssistantDefinition(model);
-
- this.Description = this._assistant.Description;
- this.Id = this._assistant.Id;
- this.Name = this._assistant.Name;
- this.Instructions = this._assistant.Instructions;
+ return new OpenAIAssistantChannel(this.Client, thread.Id);
}
+ [Obsolete]
private static OpenAIAssistantDefinition CreateAssistantDefinition(Assistant model)
{
OpenAIAssistantExecutionOptions? options = null;
@@ -547,48 +523,4 @@ private static OpenAIAssistantDefinition CreateAssistantDefinition(Assistant mod
ExecutionOptions = options,
};
}
-
- private static AssistantClient CreateClient(OpenAIClientProvider config)
- {
- return config.Client.GetAssistantClient();
- }
-
- private async IAsyncEnumerable InternalInvokeAsync(
- string threadId,
- OpenAIAssistantInvocationOptions? options,
- KernelArguments? arguments = null,
- Kernel? kernel = null,
- [EnumeratorCancellation] CancellationToken cancellationToken = default)
- {
- this.ThrowIfDeleted();
-
- kernel ??= this.Kernel;
- arguments = this.MergeArguments(arguments);
-
- await foreach ((bool isVisible, ChatMessageContent message) in AssistantThreadActions.InvokeAsync(this, this._client, threadId, options, this.Logger, kernel, arguments, cancellationToken).ConfigureAwait(false))
- {
- if (isVisible)
- {
- yield return message;
- }
- }
- }
-
- private IAsyncEnumerable InternalInvokeStreamingAsync(
- string threadId,
- OpenAIAssistantInvocationOptions? options,
- KernelArguments? arguments = null,
- Kernel? kernel = null,
- ChatHistory? messages = null,
- CancellationToken cancellationToken = default)
- {
- this.ThrowIfDeleted();
-
- kernel ??= this.Kernel;
- arguments = this.MergeArguments(arguments);
-
- return AssistantThreadActions.InvokeStreamingAsync(this, this._client, threadId, messages, options, this.Logger, kernel, arguments, cancellationToken);
- }
-
- #endregion
}
diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantCapabilities.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantCapabilities.cs
index 94decedc8c0b..8274541862f1 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIAssistantCapabilities.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantCapabilities.cs
@@ -1,4 +1,5 @@
// Copyright (c) Microsoft. All rights reserved.
+using System;
using System.Collections.Generic;
using System.Text.Json.Serialization;
@@ -7,6 +8,7 @@ namespace Microsoft.SemanticKernel.Agents.OpenAI;
///
/// Defines the capabilities of an assistant.
///
+[Obsolete("Use the OpenAI.Assistants.AssistantClient.CreateAssistantAsync() to create an assistant definition.")]
public class OpenAIAssistantCapabilities
{
///
diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs
index 506f0a837ebf..1a7d0b085a43 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs
@@ -30,16 +30,12 @@ protected override async Task ReceiveAsync(IEnumerable histo
OpenAIAssistantAgent agent,
CancellationToken cancellationToken)
{
- agent.ThrowIfDeleted();
-
return AssistantThreadActions.InvokeAsync(agent, this._client, this._threadId, invocationOptions: null, this.Logger, agent.Kernel, agent.Arguments, cancellationToken);
}
///
protected override IAsyncEnumerable InvokeStreamingAsync(OpenAIAssistantAgent agent, IList messages, CancellationToken cancellationToken = default)
{
- agent.ThrowIfDeleted();
-
return AssistantThreadActions.InvokeStreamingAsync(agent, this._client, this._threadId, messages, invocationOptions: null, this.Logger, agent.Kernel, agent.Arguments, cancellationToken);
}
diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantDefinition.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantDefinition.cs
index 9d15188b4c8e..af25620bfcd2 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIAssistantDefinition.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantDefinition.cs
@@ -1,4 +1,5 @@
// Copyright (c) Microsoft. All rights reserved.
+using System;
using System.Text.Json.Serialization;
namespace Microsoft.SemanticKernel.Agents.OpenAI;
@@ -6,6 +7,7 @@ namespace Microsoft.SemanticKernel.Agents.OpenAI;
///
/// Defines an assistant.
///
+[Obsolete("Use the OpenAI.Assistants.AssistantClient.CreateAssistantAsync() to create an assistant definition.")]
public sealed class OpenAIAssistantDefinition : OpenAIAssistantCapabilities
{
///
diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantExecutionOptions.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantExecutionOptions.cs
index a44a2f37eec6..7e3d72788fa6 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIAssistantExecutionOptions.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantExecutionOptions.cs
@@ -1,4 +1,5 @@
// Copyright (c) Microsoft. All rights reserved.
+using System;
using System.Text.Json.Serialization;
namespace Microsoft.SemanticKernel.Agents.OpenAI;
@@ -9,6 +10,7 @@ namespace Microsoft.SemanticKernel.Agents.OpenAI;
///
/// These options are persisted as a single entry of the assistant's metadata with key: "__run_options".
///
+[Obsolete("Use RunCreationOptions to specify assistant invocation behavior.")]
public sealed class OpenAIAssistantExecutionOptions
{
///
diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantInvocationOptions.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantInvocationOptions.cs
index d16cea780a34..1e9cb83e9461 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIAssistantInvocationOptions.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantInvocationOptions.cs
@@ -1,4 +1,5 @@
// Copyright (c) Microsoft. All rights reserved.
+using System;
using System.Collections.Generic;
using System.Text.Json.Serialization;
@@ -10,6 +11,7 @@ namespace Microsoft.SemanticKernel.Agents.OpenAI;
///
/// This class is not applicable to usage.
///
+[Obsolete("Use RunCreationOptions to specify assistant invocation behavior.")]
public sealed class OpenAIAssistantInvocationOptions
{
///
diff --git a/dotnet/src/Agents/OpenAI/OpenAIClientProvider.cs b/dotnet/src/Agents/OpenAI/OpenAIClientProvider.cs
index b65025cb245c..ab4f542eb49b 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIClientProvider.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIClientProvider.cs
@@ -10,6 +10,7 @@
using Azure.Core;
using Microsoft.SemanticKernel.Http;
using OpenAI;
+using OpenAI.Assistants;
namespace Microsoft.SemanticKernel.Agents.OpenAI;
@@ -22,12 +23,18 @@ public sealed class OpenAIClientProvider
/// Specifies a key that avoids an exception from OpenAI Client when a custom endpoint is provided without an API key.
///
private const string SingleSpaceKey = " ";
+ private AssistantClient? _assistantClient;
///
/// Gets an active client instance.
///
public OpenAIClient Client { get; }
+ ///
+ /// Gets an active assistant client instance.
+ ///
+ public AssistantClient AssistantClient => this._assistantClient ??= this.Client.GetAssistantClient();
+
///
/// Gets configuration keys required for management.
///
@@ -36,7 +43,7 @@ public sealed class OpenAIClientProvider
private OpenAIClientProvider(OpenAIClient client, IEnumerable keys)
{
this.Client = client;
- this.ConfigurationKeys = keys.ToArray();
+ this.ConfigurationKeys = [.. keys];
}
///
diff --git a/dotnet/src/Agents/OpenAI/OpenAIThreadCreationOptions.cs b/dotnet/src/Agents/OpenAI/OpenAIThreadCreationOptions.cs
index f13dcd654e9a..11f4adb6cfe3 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIThreadCreationOptions.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIThreadCreationOptions.cs
@@ -1,4 +1,5 @@
// Copyright (c) Microsoft. All rights reserved.
+using System;
using System.Collections.Generic;
using System.Text.Json.Serialization;
@@ -7,6 +8,7 @@ namespace Microsoft.SemanticKernel.Agents.OpenAI;
///
/// Specifies thread creation options.
///
+[Obsolete("Use the OpenAI.Assistants.AssistantClient.CreateThreadAsync() to create a thread.")]
public sealed class OpenAIThreadCreationOptions
{
///
diff --git a/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj b/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj
index 32d31f65c776..dee4aed044c3 100644
--- a/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj
+++ b/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj
@@ -8,9 +8,15 @@
true
false
12
- $(NoWarn);CA2007,CA1812,CA1861,CA1063,VSTHRD111,SKEXP0001,SKEXP0050,SKEXP0110;OPENAI001
+ $(NoWarn);CA2007,CA1812,CA1861,CA1063,CS0618,VSTHRD111,SKEXP0001,SKEXP0050,SKEXP0110;OPENAI001
+
+
+
+
+
+
@@ -41,8 +47,4 @@
-
-
-
-
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Extensions/AssistantClientExtensionsTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Extensions/AssistantClientExtensionsTests.cs
new file mode 100644
index 000000000000..f1cdd1e429cd
--- /dev/null
+++ b/dotnet/src/Agents/UnitTests/OpenAI/Extensions/AssistantClientExtensionsTests.cs
@@ -0,0 +1,357 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System;
+using System.ClientModel;
+using System.Collections.Generic;
+using System.Net;
+using System.Net.Http;
+using System.Threading.Tasks;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents.OpenAI;
+using Microsoft.SemanticKernel.ChatCompletion;
+using OpenAI.Assistants;
+using Xunit;
+
+namespace SemanticKernel.Agents.UnitTests.OpenAI.Extensions;
+
+///
+/// Unit testing of .
+///
+public sealed class AssistantClientExtensionsTests : IDisposable
+{
+ private const string ModelValue = "testmodel";
+
+ private readonly HttpMessageHandlerStub _messageHandlerStub;
+ private readonly HttpClient _httpClient;
+ private readonly OpenAIClientProvider _clientProvider;
+
+ ///
+ /// Verify the assistant creation with default values.
+ ///
+ [Fact]
+ public async Task VerifyCreateAssistantAsync()
+ {
+ // Arrange
+ this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.AssistantDefinition(ModelValue));
+
+ // Act
+ Assistant definition = await this._clientProvider.AssistantClient.CreateAssistantAsync(modelId: ModelValue);
+
+ // Assert
+ Assert.NotNull(definition);
+ Assert.Equal(ModelValue, definition.Model);
+ }
+
+ ///
+ /// Verify the assistant creation with name, instructions, and description.
+ ///
+ [Fact]
+ public async Task VerifyCreateAssistantWithIdentityAsync()
+ {
+ // Arrange
+ const string NameValue = "test name";
+ const string DescriptionValue = "test description";
+ const string InstructionsValue = "test instructions";
+
+ this.SetupResponse(
+ HttpStatusCode.OK,
+ OpenAIAssistantResponseContent.AssistantDefinition(
+ ModelValue,
+ name: NameValue,
+ instructions: InstructionsValue,
+ description: DescriptionValue));
+
+ // Act
+ Assistant definition = await this._clientProvider.AssistantClient.CreateAssistantAsync(
+ modelId: ModelValue,
+ name: NameValue,
+ instructions: InstructionsValue,
+ description: DescriptionValue);
+
+ // Assert
+ Assert.NotNull(definition);
+ Assert.Equal(NameValue, definition.Name);
+ Assert.Equal(DescriptionValue, definition.Description);
+ Assert.Equal(InstructionsValue, definition.Instructions);
+ }
+
+ ///
+ /// Verify the assistant creation from a prompt template.
+ ///
+ [Fact]
+ public async Task VerifyCreateAssistantWithTemplateAsync()
+ {
+ // Arrange
+ const string NameValue = "test name";
+ const string DescriptionValue = "test description";
+ const string InstructionsValue = "test instructions";
+ PromptTemplateConfig templateConfig =
+ new(InstructionsValue)
+ {
+ Name = NameValue,
+ Description = DescriptionValue,
+ };
+ this.SetupResponse(
+ HttpStatusCode.OK,
+ OpenAIAssistantResponseContent.AssistantDefinition(
+ ModelValue,
+ name: NameValue,
+ instructions: InstructionsValue,
+ description: DescriptionValue));
+
+ // Act
+ Assistant definition = await this._clientProvider.AssistantClient.CreateAssistantFromTemplateAsync(modelId: ModelValue, templateConfig);
+
+ // Assert
+ Assert.NotNull(definition);
+ Assert.Equal(NameValue, definition.Name);
+ Assert.Equal(DescriptionValue, definition.Description);
+ Assert.Equal(InstructionsValue, definition.Instructions);
+ }
+
+ ///
+ /// Verify the assistant creation with code-interpreter enabled.
+ ///
+ [Fact]
+ public async Task VerifyCreateAssistantWithCodeInterpreterAsync()
+ {
+ // Arrange
+ this.SetupResponse(
+ HttpStatusCode.OK,
+ OpenAIAssistantResponseContent.AssistantDefinition(ModelValue, enableCodeInterpreter: true));
+
+ // Act
+ Assistant definition = await this._clientProvider.AssistantClient.CreateAssistantAsync(
+ modelId: ModelValue,
+ enableCodeInterpreter: true);
+
+ // Assert
+ Assert.NotNull(definition);
+ Assert.Single(definition.Tools);
+ Assert.IsType(definition.Tools[0]);
+ }
+
+ ///
+ /// Verify the assistant creation with code-interpreter files specified.
+ ///
+ [Fact]
+ public async Task VerifyCreateAssistantWithCodeInterpreterFilesAsync()
+ {
+ // Arrange
+ string[] fileIds = ["file1", "file2"];
+ this.SetupResponse(
+ HttpStatusCode.OK,
+ OpenAIAssistantResponseContent.AssistantDefinition(ModelValue, codeInterpreterFileIds: fileIds));
+
+ // Act
+ Assistant definition = await this._clientProvider.AssistantClient.CreateAssistantAsync(
+ modelId: ModelValue,
+ codeInterpreterFileIds: fileIds);
+
+ // Assert
+ Assert.NotNull(definition);
+ Assert.Single(definition.Tools);
+ Assert.IsType(definition.Tools[0]);
+ Assert.NotNull(definition.ToolResources.CodeInterpreter);
+ Assert.Equal(2, definition.ToolResources.CodeInterpreter.FileIds.Count);
+ }
+
+ ///
+ /// Verify the assistant creation with file-search enabled.
+ ///
+ [Fact]
+ public async Task VerifyCreateAssistantWithFileSearchAsync()
+ {
+ // Arrange
+ this.SetupResponse(
+ HttpStatusCode.OK,
+ OpenAIAssistantResponseContent.AssistantDefinition(ModelValue, enableFileSearch: true));
+
+ // Act
+ Assistant definition = await this._clientProvider.AssistantClient.CreateAssistantAsync(
+ modelId: ModelValue,
+ enableFileSearch: true);
+
+ // Assert
+ Assert.NotNull(definition);
+ Assert.Single(definition.Tools);
+ Assert.IsType(definition.Tools[0]);
+ }
+
+ ///
+ /// Verify the assistant creation with vector-store specified.
+ ///
+ [Fact]
+ public async Task VerifyCreateAssistantWithVectorStoreAsync()
+ {
+ // Arrange
+ const string VectorStoreValue = "test store";
+ this.SetupResponse(
+ HttpStatusCode.OK,
+ OpenAIAssistantResponseContent.AssistantDefinition(ModelValue, vectorStoreId: VectorStoreValue));
+
+ // Act
+ Assistant definition = await this._clientProvider.AssistantClient.CreateAssistantAsync(
+ modelId: ModelValue,
+ vectorStoreId: VectorStoreValue);
+
+ // Assert
+ Assert.NotNull(definition);
+ Assert.Single(definition.Tools);
+ Assert.IsType(definition.Tools[0]);
+ Assert.NotNull(definition.ToolResources.FileSearch);
+ Assert.Single(definition.ToolResources.FileSearch.VectorStoreIds);
+ }
+
+ ///
+ /// Verify the invocation and response of
+ /// for an agent with temperature defined.
+ ///
+ [Fact]
+ public async Task VerifyCreateAssistantWithTemperatureAsync()
+ {
+ // Arrange
+ const float TemperatureValue = 0.5F;
+ this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.AssistantDefinition("testmodel", temperature: TemperatureValue));
+
+ // Act
+ Assistant definition = await this._clientProvider.AssistantClient.CreateAssistantAsync(
+ modelId: "testmodel",
+ temperature: TemperatureValue);
+
+ // Assert
+ Assert.NotNull(definition);
+ Assert.Equal(TemperatureValue, definition.Temperature);
+ }
+
+ ///
+ /// Verify the invocation and response of
+ /// for an agent with topP defined.
+ ///
+ [Fact]
+ public async Task VerifyCreateAssistantWithTopPAsync()
+ {
+ // Arrange
+ const float TopPValue = 2.0F;
+ this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.AssistantDefinition("testmodel", topP: TopPValue));
+
+ // Act
+ Assistant definition = await this._clientProvider.AssistantClient.CreateAssistantAsync(
+ modelId: "testmodel",
+ topP: TopPValue);
+
+ // Assert
+ Assert.NotNull(definition);
+ Assert.Equal(TopPValue, definition.NucleusSamplingFactor);
+ }
+
+ ///
+ /// Verify the invocation and response of
+ /// for an agent with execution settings and meta-data.
+ ///
+ [Fact]
+ public async Task VerifyCreateAssistantWithMetadataAsync()
+ {
+ // Arrange
+ Dictionary metadata =
+ new()
+ {
+ { "a", "1" },
+ { "b", "2" },
+ };
+ this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.AssistantDefinition("testmodel", metadata: metadata));
+
+ // Act
+ Assistant definition = await this._clientProvider.AssistantClient.CreateAssistantAsync(
+ modelId: "testmodel",
+ metadata: metadata);
+
+ // Assert
+ Assert.NotNull(definition);
+ Assert.NotEmpty(definition.Metadata);
+ }
+
+ ///
+ /// Verify the deletion of assistant.
+ ///
+ [Fact]
+ public async Task VerifyDeleteAssistantAsync()
+ {
+ // Arrange
+ this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.DeleteAgent);
+
+ // Act
+ AssistantDeletionResult result = await this._clientProvider.AssistantClient.DeleteAssistantAsync("testid");
+
+ // Assert
+ Assert.True(result.Deleted);
+ }
+
+ ///
+ /// Verify the creating a thread.
+ ///
+ [Fact]
+ public async Task VerifyCreateThreadAsync()
+ {
+ // Arrange
+ this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.CreateThread);
+
+ // Act
+ string threadId = await this._clientProvider.AssistantClient.CreateThreadAsync(messages: null);
+
+ // Assert
+ Assert.NotNull(threadId);
+ }
+
+ ///
+ /// Verify the creating a thread with messages.
+ ///
+ [Fact]
+ public async Task VerifyCreateThreadWithMessagesAsync()
+ {
+ // Arrange
+ this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.CreateThread);
+
+ // Act
+ string threadId = await this._clientProvider.AssistantClient.CreateThreadAsync(messages: [new ChatMessageContent(AuthorRole.User, "test")]);
+
+ // Assert
+ Assert.NotNull(threadId);
+ }
+
+ ///
+ /// Verify the creating a thread with metadata.
+ ///
+ [Fact]
+ public async Task VerifyCreateThreadWithMetadataAsync()
+ {
+ // Arrange
+ this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.CreateThread);
+ Dictionary metadata = new() { { "a", "1" }, { "b", "2" } };
+
+ // Act
+ string threadId = await this._clientProvider.AssistantClient.CreateThreadAsync(metadata: metadata);
+
+ // Assert
+ Assert.NotNull(threadId);
+ }
+
+ ///
+ public void Dispose()
+ {
+ this._messageHandlerStub.Dispose();
+ this._httpClient.Dispose();
+ }
+
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ public AssistantClientExtensionsTests()
+ {
+ this._messageHandlerStub = new HttpMessageHandlerStub();
+ this._httpClient = new HttpClient(this._messageHandlerStub, disposeHandler: false);
+ this._clientProvider = OpenAIClientProvider.ForOpenAI(apiKey: new ApiKeyCredential("fakekey"), endpoint: null, this._httpClient);
+ }
+
+ private void SetupResponse(HttpStatusCode statusCode, string content) =>
+ this._messageHandlerStub.SetupResponses(statusCode, content);
+}
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Extensions/KernelExtensionsTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Extensions/KernelExtensionsTests.cs
deleted file mode 100644
index 70c27ccb2152..000000000000
--- a/dotnet/src/Agents/UnitTests/OpenAI/Extensions/KernelExtensionsTests.cs
+++ /dev/null
@@ -1,60 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Agents.OpenAI;
-using Xunit;
-using KernelExtensions = Microsoft.SemanticKernel.Agents.OpenAI;
-
-namespace SemanticKernel.Agents.UnitTests.OpenAI.Extensions;
-
-///
-/// Unit testing of .
-///
-public class KernelExtensionsTests
-{
- ///
- /// Verify function lookup using KernelExtensions.
- ///
- [Fact]
- public void VerifyGetKernelFunctionLookup()
- {
- // Arrange
- Kernel kernel = new();
- KernelPlugin plugin = KernelPluginFactory.CreateFromType();
- kernel.Plugins.Add(plugin);
-
- // Act
- KernelFunction function = kernel.GetKernelFunction($"{nameof(TestPlugin)}-{nameof(TestPlugin.TestFunction)}", '-');
-
- // Assert
- Assert.NotNull(function);
- Assert.Equal(nameof(TestPlugin.TestFunction), function.Name);
- }
-
- ///
- /// Verify error case for function lookup using KernelExtensions.
- ///
- [Fact]
- public void VerifyGetKernelFunctionInvalid()
- {
- // Arrange
- Kernel kernel = new();
- KernelPlugin plugin = KernelPluginFactory.CreateFromType();
- kernel.Plugins.Add(plugin);
-
- // Act and Assert
- Assert.Throws(() => kernel.GetKernelFunction("a", '-'));
- Assert.Throws(() => kernel.GetKernelFunction("a-b", ':'));
- Assert.Throws(() => kernel.GetKernelFunction("a-b-c", '-'));
- }
-
- ///
- /// Exists only for parsing.
- ///
-#pragma warning disable CA1812 // Avoid uninstantiated internal classes
- private sealed class TestPlugin()
-#pragma warning restore CA1812 // Avoid uninstantiated internal classes
- {
- [KernelFunction]
- public void TestFunction() { }
- }
-}
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Extensions/OpenAIClientExtensionsTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Extensions/OpenAIClientExtensionsTests.cs
new file mode 100644
index 000000000000..ce03e8f5843e
--- /dev/null
+++ b/dotnet/src/Agents/UnitTests/OpenAI/Extensions/OpenAIClientExtensionsTests.cs
@@ -0,0 +1,139 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System;
+using System.ClientModel;
+using System.Collections.Generic;
+using System.IO;
+using System.Net;
+using System.Net.Http;
+using System.Text;
+using System.Threading.Tasks;
+using Microsoft.SemanticKernel.Agents.OpenAI;
+using OpenAI.VectorStores;
+using Xunit;
+
+namespace SemanticKernel.Agents.UnitTests.OpenAI.Extensions;
+
+///
+/// Unit testing of .
+///
+public sealed class OpenAIClientExtensionsTests : IDisposable
+{
+ private readonly HttpMessageHandlerStub _messageHandlerStub;
+ private readonly HttpClient _httpClient;
+ private readonly OpenAIClientProvider _clientProvider;
+
+ ///
+ /// Verify the default creation of vector-store.
+ ///
+ [Fact]
+ public async Task VerifyCreateDefaultVectorStoreAsync()
+ {
+ // Arrange
+ string[] fileIds = ["file-1", "file-2"];
+ this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.CreateVectorStore);
+
+ // Act
+ string storeId = await this._clientProvider.Client.CreateVectorStoreAsync(fileIds, waitUntilCompleted: false);
+
+ // Assert
+ Assert.NotNull(storeId);
+ }
+
+ ///
+ /// Verify the custom creation of vector-store.
+ ///
+ [Fact]
+ public async Task VerifyCreateVectorStoreAsync()
+ {
+ // Arrange
+ string[] fileIds = ["file-1", "file-2"];
+ Dictionary metadata =
+ new()
+ {
+ { "a", "1" },
+ { "b", "2" },
+ };
+ this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.CreateVectorStore);
+
+ // Act
+ string storeId = await this._clientProvider.Client.CreateVectorStoreAsync(
+ fileIds,
+ waitUntilCompleted: false,
+ storeName: "test-store",
+ expirationPolicy: new VectorStoreExpirationPolicy(VectorStoreExpirationAnchor.LastActiveAt, 30),
+ chunkingStrategy: FileChunkingStrategy.Auto,
+ metadata: metadata);
+
+ // Assert
+ Assert.NotNull(storeId);
+ }
+
+ ///
+ /// Verify the uploading an assistant file.
+ ///
+ [Fact]
+ public async Task VerifyUploadFileAsync()
+ {
+ // Arrange
+ this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.UploadFile);
+
+ // Act
+ await using MemoryStream stream = new(Encoding.UTF8.GetBytes("test"));
+ string fileId = await this._clientProvider.Client.UploadAssistantFileAsync(stream, "text.txt");
+
+ // Assert
+ Assert.NotNull(fileId);
+ }
+
+ ///
+ /// Verify the deleting a file.
+ ///
+ [Fact]
+ public async Task VerifyDeleteFileAsync()
+ {
+ // Arrange
+ this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.DeleteFile);
+
+ // Act
+ bool isDeleted = await this._clientProvider.Client.DeleteFileAsync("file-id");
+
+ // Assert
+ Assert.True(isDeleted);
+ }
+
+ ///
+ /// Verify the deleting a vector-store.
+ ///
+ [Fact]
+ public async Task VerifyDeleteVectorStoreAsync()
+ {
+ // Arrange
+ this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.DeleteVectorStore);
+
+ // Act
+ bool isDeleted = await this._clientProvider.Client.DeleteVectorStoreAsync("store-id");
+
+ // Assert
+ Assert.True(isDeleted);
+ }
+
+ ///
+ public void Dispose()
+ {
+ this._messageHandlerStub.Dispose();
+ this._httpClient.Dispose();
+ }
+
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ public OpenAIClientExtensionsTests()
+ {
+ this._messageHandlerStub = new HttpMessageHandlerStub();
+ this._httpClient = new HttpClient(this._messageHandlerStub, disposeHandler: false);
+ this._clientProvider = OpenAIClientProvider.ForOpenAI(apiKey: new ApiKeyCredential("fakekey"), endpoint: null, this._httpClient);
+ }
+
+ private void SetupResponse(HttpStatusCode statusCode, string content) =>
+ this._messageHandlerStub.SetupResponses(statusCode, content);
+}
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantRunOptionsFactoryTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantRunOptionsFactoryTests.cs
index 15fd0d6aa5ae..dfca85afc0f2 100644
--- a/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantRunOptionsFactoryTests.cs
+++ b/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantRunOptionsFactoryTests.cs
@@ -1,5 +1,4 @@
// Copyright (c) Microsoft. All rights reserved.
-using System.Collections.Generic;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents.OpenAI;
using Microsoft.SemanticKernel.Agents.OpenAI.Internal;
@@ -21,19 +20,16 @@ public class AssistantRunOptionsFactoryTests
public void AssistantRunOptionsFactoryExecutionOptionsNullTest()
{
// Arrange
- OpenAIAssistantDefinition definition =
- new("gpt-anything")
+ RunCreationOptions defaultOptions =
+ new()
{
+ ModelOverride = "gpt-anything",
Temperature = 0.5F,
- ExecutionOptions =
- new()
- {
- AdditionalInstructions = "test",
- },
+ AdditionalInstructions = "test",
};
// Act
- RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, null, null);
+ RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(defaultOptions, null, null);
// Assert
Assert.NotNull(options);
@@ -52,20 +48,21 @@ public void AssistantRunOptionsFactoryExecutionOptionsNullTest()
public void AssistantRunOptionsFactoryExecutionOptionsEquivalentTest()
{
// Arrange
- OpenAIAssistantDefinition definition =
- new("gpt-anything")
+ RunCreationOptions defaultOptions =
+ new()
{
+ ModelOverride = "gpt-anything",
Temperature = 0.5F,
};
- OpenAIAssistantInvocationOptions invocationOptions =
+ RunCreationOptions invocationOptions =
new()
{
Temperature = 0.5F,
};
// Act
- RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, "test", invocationOptions);
+ RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(defaultOptions, "test", invocationOptions);
// Assert
Assert.NotNull(options);
@@ -81,29 +78,26 @@ public void AssistantRunOptionsFactoryExecutionOptionsEquivalentTest()
public void AssistantRunOptionsFactoryExecutionOptionsOverrideTest()
{
// Arrange
- OpenAIAssistantDefinition definition =
- new("gpt-anything")
+ RunCreationOptions defaultOptions =
+ new()
{
+ ModelOverride = "gpt-anything",
Temperature = 0.5F,
- ExecutionOptions =
- new()
- {
- AdditionalInstructions = "test1",
- TruncationMessageCount = 5,
- },
+ TruncationStrategy = RunTruncationStrategy.CreateLastMessagesStrategy(5),
};
- OpenAIAssistantInvocationOptions invocationOptions =
+ RunCreationOptions invocationOptions =
new()
{
+ ModelOverride = "gpt-anything",
AdditionalInstructions = "test2",
Temperature = 0.9F,
- TruncationMessageCount = 8,
- EnableJsonResponse = true,
+ TruncationStrategy = RunTruncationStrategy.CreateLastMessagesStrategy(8),
+ ResponseFormat = AssistantResponseFormat.JsonObject,
};
// Act
- RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, null, invocationOptions);
+ RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(defaultOptions, null, invocationOptions);
// Assert
Assert.NotNull(options);
@@ -121,21 +115,18 @@ public void AssistantRunOptionsFactoryExecutionOptionsOverrideTest()
public void AssistantRunOptionsFactoryExecutionOptionsMetadataTest()
{
// Arrange
- OpenAIAssistantDefinition definition =
- new("gpt-anything")
+ RunCreationOptions defaultOptions =
+ new()
{
+ ModelOverride = "gpt-anything",
Temperature = 0.5F,
- ExecutionOptions =
- new()
- {
- TruncationMessageCount = 5,
- },
+ TruncationStrategy = RunTruncationStrategy.CreateLastMessagesStrategy(5),
};
- OpenAIAssistantInvocationOptions invocationOptions =
+ RunCreationOptions invocationOptions =
new()
{
- Metadata = new Dictionary
+ Metadata =
{
{ "key1", "value" },
{ "key2", null! },
@@ -143,7 +134,7 @@ public void AssistantRunOptionsFactoryExecutionOptionsMetadataTest()
};
// Act
- RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, null, invocationOptions);
+ RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(defaultOptions, null, invocationOptions);
// Assert
Assert.Equal(2, options.Metadata.Count);
@@ -158,18 +149,21 @@ public void AssistantRunOptionsFactoryExecutionOptionsMetadataTest()
public void AssistantRunOptionsFactoryExecutionOptionsMessagesTest()
{
// Arrange
- OpenAIAssistantDefinition definition = new("gpt-anything");
+ RunCreationOptions defaultOptions =
+ new()
+ {
+ ModelOverride = "gpt-anything",
+ };
- OpenAIAssistantInvocationOptions invocationOptions =
+ ChatMessageContent message = new(AuthorRole.User, "test message");
+ RunCreationOptions invocationOptions =
new()
{
- AdditionalMessages = [
- new ChatMessageContent(AuthorRole.User, "test message")
- ]
+ AdditionalMessages = { message.ToThreadInitializationMessage() },
};
// Act
- RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, null, invocationOptions);
+ RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(defaultOptions, null, invocationOptions);
// Assert
Assert.Single(options.AdditionalMessages);
@@ -182,20 +176,17 @@ public void AssistantRunOptionsFactoryExecutionOptionsMessagesTest()
public void AssistantRunOptionsFactoryExecutionOptionsMaxTokensTest()
{
// Arrange
- OpenAIAssistantDefinition definition =
- new("gpt-anything")
+ RunCreationOptions defaultOptions =
+ new()
{
+ ModelOverride = "gpt-anything",
Temperature = 0.5F,
- ExecutionOptions =
- new()
- {
- MaxCompletionTokens = 4096,
- MaxPromptTokens = 1024,
- },
+ MaxOutputTokenCount = 4096,
+ MaxInputTokenCount = 1024,
};
// Act
- RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, null, null);
+ RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(defaultOptions, null, null);
// Assert
Assert.Equal(1024, options.MaxInputTokenCount);
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentTests.cs
index 7ca3ca17aba2..692938564f9c 100644
--- a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentTests.cs
+++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentTests.cs
@@ -2,11 +2,9 @@
using System;
using System.ClientModel;
using System.Collections.Generic;
-using System.IO;
using System.Linq;
using System.Net;
using System.Net.Http;
-using System.Text;
using System.Threading.Tasks;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents;
@@ -312,7 +310,7 @@ public async Task VerifyOpenAIAssistantAgentRetrievalAsync()
OpenAIAssistantAgent agent =
await OpenAIAssistantAgent.RetrieveAsync(
- this.CreateTestConfiguration(),
+ this.CreateTestProvider(),
"#id",
this._emptyKernel);
@@ -333,10 +331,10 @@ public async Task VerifyOpenAIAssistantAgentRetrievalWithFactoryAsync()
OpenAIAssistantAgent agent =
await OpenAIAssistantAgent.RetrieveAsync(
- this.CreateTestConfiguration(),
+ this.CreateTestProvider(),
"#id",
this._emptyKernel,
- new KernelArguments(),
+ [],
new KernelPromptTemplateFactory());
// Act and Assert
@@ -351,26 +349,13 @@ public async Task VerifyOpenAIAssistantAgentDeleteAsync()
{
// Arrange
OpenAIAssistantAgent agent = await this.CreateAgentAsync();
- // Assert
- Assert.False(agent.IsDeleted);
-
- // Arrange
this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.DeleteAgent);
// Act
- await agent.DeleteAsync();
- // Assert
- Assert.True(agent.IsDeleted);
+ bool isDeleted = await agent.DeleteAsync();
- // Act
- await agent.DeleteAsync(); // Doesn't throw
// Assert
- Assert.True(agent.IsDeleted);
- await Assert.ThrowsAsync(() => agent.AddChatMessageAsync("threadid", new(AuthorRole.User, "test")));
- await Assert.ThrowsAsync(() => agent.GetThreadMessagesAsync("threadid").ToArrayAsync().AsTask());
- await Assert.ThrowsAsync(() => agent.InvokeAsync("threadid").ToArrayAsync().AsTask());
- await Assert.ThrowsAsync(() => agent.InvokeStreamingAsync("threadid").ToArrayAsync().AsTask());
- await Assert.ThrowsAsync(() => agent.InvokeStreamingAsync("threadid", new OpenAIAssistantInvocationOptions()).ToArrayAsync().AsTask());
+ Assert.True(isDeleted);
}
///
@@ -414,25 +399,6 @@ public async Task VerifyOpenAIAssistantAgentDeleteThreadAsync()
Assert.True(isDeleted);
}
- ///
- /// Verify the deleting a thread via .
- ///
- [Fact]
- public async Task VerifyOpenAIAssistantAgentUploadFileAsync()
- {
- // Arrange
- OpenAIAssistantAgent agent = await this.CreateAgentAsync();
-
- this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.UploadFile);
-
- // Act
- using MemoryStream stream = new(Encoding.UTF8.GetBytes("test"));
- string fileId = await agent.UploadFileAsync(stream, "text.txt");
-
- // Assert
- Assert.NotNull(fileId);
- }
-
///
/// Verify invocation via .
///
@@ -683,7 +649,7 @@ public async Task VerifyOpenAIAssistantAgentListDefinitionAsync()
// Act
var messages =
await OpenAIAssistantAgent.ListDefinitionsAsync(
- this.CreateTestConfiguration()).ToArrayAsync();
+ this.CreateTestProvider()).ToArrayAsync();
// Assert
Assert.Equal(7, messages.Length);
@@ -696,7 +662,7 @@ await OpenAIAssistantAgent.ListDefinitionsAsync(
// Act
messages =
await OpenAIAssistantAgent.ListDefinitionsAsync(
- this.CreateTestConfiguration()).ToArrayAsync();
+ this.CreateTestProvider()).ToArrayAsync();
// Assert
Assert.Equal(4, messages.Length);
}
@@ -758,7 +724,7 @@ private async Task VerifyAgentCreationAsync(OpenAIAssistantDefinition definition
OpenAIAssistantAgent agent =
await OpenAIAssistantAgent.CreateAsync(
- this.CreateTestConfiguration(),
+ this.CreateTestProvider(),
definition,
this._emptyKernel);
@@ -774,10 +740,10 @@ private async Task VerifyAgentTemplateAsync(
OpenAIAssistantAgent agent =
await OpenAIAssistantAgent.CreateFromTemplateAsync(
- this.CreateTestConfiguration(),
+ this.CreateTestProvider(),
capabilities,
this._emptyKernel,
- new KernelArguments(),
+ [],
templateConfig,
templateFactory);
@@ -804,9 +770,8 @@ private static void ValidateAgent(
// Verify fundamental state
Assert.NotNull(agent);
Assert.NotNull(agent.Id);
- Assert.False(agent.IsDeleted);
Assert.NotNull(agent.Definition);
- Assert.Equal(expectedConfig.ModelId, agent.Definition.ModelId);
+ Assert.Equal(expectedConfig.ModelId, agent.Definition.Model);
// Verify core properties
Assert.Equal(expectedInstructions ?? string.Empty, agent.Instructions);
@@ -815,11 +780,7 @@ private static void ValidateAgent(
// Verify options
Assert.Equal(expectedConfig.Temperature, agent.Definition.Temperature);
- Assert.Equal(expectedConfig.TopP, agent.Definition.TopP);
- Assert.Equal(expectedConfig.ExecutionOptions?.MaxCompletionTokens, agent.Definition.ExecutionOptions?.MaxCompletionTokens);
- Assert.Equal(expectedConfig.ExecutionOptions?.MaxPromptTokens, agent.Definition.ExecutionOptions?.MaxPromptTokens);
- Assert.Equal(expectedConfig.ExecutionOptions?.ParallelToolCallsEnabled, agent.Definition.ExecutionOptions?.ParallelToolCallsEnabled);
- Assert.Equal(expectedConfig.ExecutionOptions?.TruncationMessageCount, agent.Definition.ExecutionOptions?.TruncationMessageCount);
+ Assert.Equal(expectedConfig.TopP, agent.Definition.NucleusSamplingFactor);
// Verify tool definitions
int expectedToolCount = 0;
@@ -831,7 +792,7 @@ private static void ValidateAgent(
++expectedToolCount;
}
- Assert.Equal(hasCodeInterpreter, agent.Tools.OfType().Any());
+ Assert.Equal(hasCodeInterpreter, agent.Definition.Tools.OfType<CodeInterpreterToolDefinition>().Any());
bool hasFileSearch = false;
if (expectedConfig.EnableFileSearch)
@@ -840,9 +801,9 @@ private static void ValidateAgent(
++expectedToolCount;
}
- Assert.Equal(hasFileSearch, agent.Tools.OfType().Any());
+ Assert.Equal(hasFileSearch, agent.Definition.Tools.OfType<FileSearchToolDefinition>().Any());
- Assert.Equal(expectedToolCount, agent.Tools.Count);
+ Assert.Equal(expectedToolCount, agent.Definition.Tools.Count);
// Verify metadata
Assert.NotNull(agent.Definition.Metadata);
@@ -866,8 +827,8 @@ private static void ValidateAgent(
}
// Verify detail definition
- Assert.Equal(expectedConfig.VectorStoreId, agent.Definition.VectorStoreId);
- Assert.Equal(expectedConfig.CodeInterpreterFileIds, agent.Definition.CodeInterpreterFileIds);
+ Assert.Equal(expectedConfig.VectorStoreId, agent.Definition.ToolResources.FileSearch?.VectorStoreIds.SingleOrDefault());
+ Assert.Equal(expectedConfig.CodeInterpreterFileIds, agent.Definition.ToolResources.CodeInterpreter?.FileIds);
}
private Task CreateAgentAsync()
@@ -878,12 +839,12 @@ private Task CreateAgentAsync()
return
OpenAIAssistantAgent.CreateAsync(
- this.CreateTestConfiguration(),
+ this.CreateTestProvider(),
definition,
this._emptyKernel);
}
- private OpenAIClientProvider CreateTestConfiguration(bool targetAzure = false)
+ private OpenAIClientProvider CreateTestProvider(bool targetAzure = false)
=> targetAzure ?
OpenAIClientProvider.ForAzureOpenAI(apiKey: new ApiKeyCredential("fakekey"), endpoint: new Uri("https://localhost"), this._httpClient) :
OpenAIClientProvider.ForOpenAI(apiKey: new ApiKeyCredential("fakekey"), endpoint: null, this._httpClient);
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantResponseContent.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantResponseContent.cs
index 7ae3cbaeacbe..3ecf07fada5e 100644
--- a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantResponseContent.cs
+++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantResponseContent.cs
@@ -1,4 +1,5 @@
// Copyright (c) Microsoft. All rights reserved.
+using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
@@ -6,6 +7,7 @@
using System.Text.Json;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents.OpenAI;
+using OpenAI.Assistants;
using Xunit;
namespace SemanticKernel.Agents.UnitTests.OpenAI;
@@ -121,7 +123,7 @@ public static string AssistantDefinition(
builder.AppendLine(@$" ""code_interpreter"": {{ ""file_ids"": [{fileIds}] }}{(hasFileSearch ? "," : string.Empty)}");
}
- if (hasFileSearch)
+ if (hasFileSearch && capabilities.VectorStoreId != null)
{
builder.AppendLine(@$" ""file_search"": {{ ""vector_store_ids"": [""{capabilities.VectorStoreId}""] }}");
}
@@ -173,6 +175,115 @@ public static string AssistantDefinition(
return builder.ToString();
}
+ ///
+ /// The response for creating or querying an assistant definition.
+ ///
+ public static string AssistantDefinition(
+ string modelId,
+ string? name = null,
+ string? description = null,
+ string? instructions = null,
+ bool enableCodeInterpreter = false,
+ IReadOnlyList<string>? codeInterpreterFileIds = null,
+ bool enableFileSearch = false,
+ string? vectorStoreId = null,
+ float? temperature = null,
+ float? topP = null,
+ AssistantResponseFormat? responseFormat = null,
+ IReadOnlyDictionary<string, string>? metadata = null)
+ {
+ StringBuilder builder = new();
+ builder.AppendLine("{");
+ builder.AppendLine(@$" ""id"": ""{AssistantId}"",");
+ builder.AppendLine(@" ""object"": ""assistant"",");
+ builder.AppendLine(@" ""created_at"": 1698984975,");
+ builder.AppendLine(@$" ""name"": ""{name}"",");
+ builder.AppendLine(@$" ""description"": ""{description}"",");
+ builder.AppendLine(@$" ""instructions"": ""{instructions}"",");
+ builder.AppendLine(@$" ""model"": ""{modelId}"",");
+
+ bool hasCodeInterpreterFiles = (codeInterpreterFileIds?.Count ?? 0) > 0;
+ bool hasCodeInterpreter = enableCodeInterpreter || hasCodeInterpreterFiles;
+ bool hasFileSearch = enableFileSearch || vectorStoreId != null;
+ if (!hasCodeInterpreter && !hasFileSearch)
+ {
+ builder.AppendLine(@" ""tools"": [],");
+ }
+ else
+ {
+ builder.AppendLine(@" ""tools"": [");
+
+ if (hasCodeInterpreter)
+ {
+ builder.Append(@$" {{ ""type"": ""code_interpreter"" }}{(hasFileSearch ? "," : string.Empty)}");
+ }
+
+ if (hasFileSearch)
+ {
+ builder.AppendLine(@" { ""type"": ""file_search"" }");
+ }
+
+ builder.AppendLine(" ],");
+ }
+
+ if (!hasCodeInterpreterFiles && !hasFileSearch)
+ {
+ builder.AppendLine(@" ""tool_resources"": {},");
+ }
+ else
+ {
+ builder.AppendLine(@" ""tool_resources"": {");
+
+ if (hasCodeInterpreterFiles)
+ {
+ string fileIds = string.Join(",", codeInterpreterFileIds!.Select(fileId => "\"" + fileId + "\""));
+ builder.AppendLine(@$" ""code_interpreter"": {{ ""file_ids"": [{fileIds}] }}{(hasFileSearch ? "," : string.Empty)}");
+ }
+
+ if (hasFileSearch && vectorStoreId != null)
+ {
+ builder.AppendLine(@$" ""file_search"": {{ ""vector_store_ids"": [""{vectorStoreId}""] }}");
+ }
+
+ builder.AppendLine(" },");
+ }
+
+ if (temperature.HasValue)
+ {
+ builder.AppendLine(@$" ""temperature"": {temperature},");
+ }
+
+ if (topP.HasValue)
+ {
+ builder.AppendLine(@$" ""top_p"": {topP},");
+ }
+ int metadataCount = (metadata?.Count ?? 0);
+ if (metadataCount == 0)
+ {
+ builder.AppendLine(@" ""metadata"": {}");
+ }
+ else
+ {
+ int index = 0;
+ builder.AppendLine(@" ""metadata"": {");
+
+ if (metadataCount > 0)
+ {
+ foreach (var (key, value) in metadata!)
+ {
+ builder.AppendLine(@$" ""{key}"": ""{value}""{(index < metadataCount - 1 ? "," : string.Empty)}");
+ ++index;
+ }
+ }
+
+ builder.AppendLine(" }");
+ }
+
+ builder.AppendLine("}");
+
+ return builder.ToString();
+ }
+
public const string DeleteAgent =
$$$"""
{
@@ -445,6 +556,42 @@ public static string GetTextMessage(string text = "test") =>
}
""";
+ public static string DeleteFile =
+ """
+ {
+ "id": "file-abc123",
+ "object": "file",
+ "deleted": true
+ }
+ """;
+
+ public static string CreateVectorStore =
+ """
+ {
+ "id": "vs_abc123",
+ "object": "vector_store",
+ "created_at": 1699061776,
+ "name": "test store",
+ "bytes": 139920,
+ "file_counts": {
+ "in_progress": 0,
+ "completed": 3,
+ "failed": 0,
+ "cancelled": 0,
+ "total": 3
+ }
+ }
+ """;
+
+ public static string DeleteVectorStore =
+ """
+ {
+ "id": "vs-abc123",
+ "object": "vector_store.deleted",
+ "deleted": true
+ }
+ """;
+
#endregion
///
diff --git a/dotnet/src/IntegrationTests/Agents/MixedAgentTests.cs b/dotnet/src/IntegrationTests/Agents/MixedAgentTests.cs
index 9be5610f2abd..dd39b660966d 100644
--- a/dotnet/src/IntegrationTests/Agents/MixedAgentTests.cs
+++ b/dotnet/src/IntegrationTests/Agents/MixedAgentTests.cs
@@ -12,6 +12,7 @@
using Microsoft.SemanticKernel.Agents.OpenAI;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.OpenAI;
+using OpenAI.Assistants;
using SemanticKernel.IntegrationTests.TestSettings;
using xRetry;
using Xunit;
@@ -71,7 +72,7 @@ await this.VerifyAgentExecutionAsync(
private async Task VerifyAgentExecutionAsync(
Kernel chatCompletionKernel,
- OpenAIClientProvider config,
+ OpenAIClientProvider clientProvider,
string modelName,
bool useNewFunctionCallingModel)
{
@@ -94,16 +95,8 @@ private async Task VerifyAgentExecutionAsync(
chatAgent.Kernel.Plugins.Add(plugin);
// Configure assistant agent with the plugin.
- OpenAIAssistantAgent assistantAgent =
- await OpenAIAssistantAgent.CreateAsync(
- config,
- new(modelName)
- {
- Name = "Assistant",
- Instructions = "Answer questions about the menu."
- },
- new Kernel());
- assistantAgent.Kernel.Plugins.Add(plugin);
+ Assistant definition = await clientProvider.AssistantClient.CreateAssistantAsync(modelName, instructions: "Answer questions about the menu.");
+ OpenAIAssistantAgent assistantAgent = new(definition, clientProvider.AssistantClient, [plugin]);
// Act & Assert
try
@@ -114,7 +107,7 @@ await OpenAIAssistantAgent.CreateAsync(
}
finally
{
- await assistantAgent.DeleteAsync();
+ await clientProvider.AssistantClient.DeleteAssistantAsync(assistantAgent.Id);
}
}
diff --git a/dotnet/src/IntegrationTests/Agents/OpenAIAssistantAgentTests.cs b/dotnet/src/IntegrationTests/Agents/OpenAIAssistantAgentTests.cs
index aa5fcbeef785..ad63eab6b795 100644
--- a/dotnet/src/IntegrationTests/Agents/OpenAIAssistantAgentTests.cs
+++ b/dotnet/src/IntegrationTests/Agents/OpenAIAssistantAgentTests.cs
@@ -12,6 +12,7 @@
using Microsoft.SemanticKernel.Agents;
using Microsoft.SemanticKernel.Agents.OpenAI;
using Microsoft.SemanticKernel.ChatCompletion;
+using OpenAI.Assistants;
using OpenAI.Files;
using OpenAI.VectorStores;
using SemanticKernel.IntegrationTests.TestSettings;
@@ -93,11 +94,10 @@ await this.ExecuteStreamingAgentAsync(
[InlineData("What is the special soup?", "Clam Chowder")]
public async Task AzureOpenAIAssistantAgentStreamingAsync(string input, string expectedAnswerContains)
{
- var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get();
- Assert.NotNull(azureOpenAIConfiguration);
+ AzureOpenAIConfiguration azureOpenAIConfiguration = this.ReadAzureConfiguration();
await this.ExecuteStreamingAgentAsync(
- OpenAIClientProvider.ForAzureOpenAI(new AzureCliCredential(), new Uri(azureOpenAIConfiguration.Endpoint)),
+ CreateClientProvider(azureOpenAIConfiguration),
azureOpenAIConfiguration.ChatDeploymentName!,
input,
expectedAnswerContains);
@@ -110,27 +110,23 @@ await this.ExecuteStreamingAgentAsync(
[RetryFact(typeof(HttpOperationException))]
public async Task AzureOpenAIAssistantAgentFunctionCallResultAsync()
{
- var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get();
- Assert.NotNull(azureOpenAIConfiguration);
-
- OpenAIAssistantAgent agent =
- await OpenAIAssistantAgent.CreateAsync(
- OpenAIClientProvider.ForAzureOpenAI(new AzureCliCredential(), new Uri(azureOpenAIConfiguration.Endpoint)),
- new(azureOpenAIConfiguration.ChatDeploymentName!),
- new Kernel());
+ AzureOpenAIConfiguration azureOpenAIConfiguration = this.ReadAzureConfiguration();
+ OpenAIClientProvider clientProvider = CreateClientProvider(azureOpenAIConfiguration);
+ Assistant definition = await clientProvider.AssistantClient.CreateAssistantAsync(azureOpenAIConfiguration.ChatDeploymentName!);
+ OpenAIAssistantAgent agent = new(definition, clientProvider.AssistantClient);
- string threadId = await agent.CreateThreadAsync();
+ AssistantThread thread = await clientProvider.AssistantClient.CreateThreadAsync();
ChatMessageContent functionResultMessage = new(AuthorRole.Assistant, [new FunctionResultContent("mock-function", result: "A result value")]);
try
{
- await agent.AddChatMessageAsync(threadId, functionResultMessage);
- var messages = await agent.GetThreadMessagesAsync(threadId).ToArrayAsync();
+ await agent.AddChatMessageAsync(thread.Id, functionResultMessage);
+ var messages = await agent.GetThreadMessagesAsync(thread.Id).ToArrayAsync();
Assert.Single(messages);
}
finally
{
- await agent.DeleteThreadAsync(threadId);
- await agent.DeleteAsync();
+ await clientProvider.AssistantClient.DeleteThreadAsync(thread.Id);
+ await clientProvider.AssistantClient.DeleteAssistantAsync(agent.Id);
}
}
@@ -141,33 +137,28 @@ await OpenAIAssistantAgent.CreateAsync(
[RetryFact(typeof(HttpOperationException))]
public async Task AzureOpenAIAssistantAgentTokensAsync()
{
- var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get();
- Assert.NotNull(azureOpenAIConfiguration);
+ AzureOpenAIConfiguration azureOpenAIConfiguration = this.ReadAzureConfiguration();
+ OpenAIClientProvider clientProvider = CreateClientProvider(azureOpenAIConfiguration);
+ Assistant definition = await clientProvider.AssistantClient.CreateAssistantAsync(azureOpenAIConfiguration.ChatDeploymentName!, instructions: "Repeat the user all of the user messages");
+ OpenAIAssistantAgent agent = new(definition, clientProvider.AssistantClient)
+ {
+ RunOptions = new()
+ {
+ MaxOutputTokenCount = 16,
+ }
+ };
- OpenAIAssistantAgent agent =
- await OpenAIAssistantAgent.CreateAsync(
- OpenAIClientProvider.ForAzureOpenAI(new AzureCliCredential(), new Uri(azureOpenAIConfiguration.Endpoint)),
- new(azureOpenAIConfiguration.ChatDeploymentName!)
- {
- Instructions = "Repeat the user all of the user messages",
- ExecutionOptions = new()
- {
- MaxCompletionTokens = 16,
- }
- },
- new Kernel());
-
- string threadId = await agent.CreateThreadAsync();
+ AssistantThread thread = await clientProvider.AssistantClient.CreateThreadAsync();
ChatMessageContent functionResultMessage = new(AuthorRole.User, "A long time ago there lived a king who was famed for his wisdom through all the land. Nothing was hidden from him, and it seemed as if news of the most secret things was brought to him through the air. But he had a strange custom; every day after dinner, when the table was cleared, and no one else was present, a trusty servant had to bring him one more dish. It was covered, however, and even the servant did not know what was in it, neither did anyone know, for the king never took off the cover to eat of it until he was quite alone.");
try
{
- await agent.AddChatMessageAsync(threadId, functionResultMessage);
- await Assert.ThrowsAsync(() => agent.InvokeAsync(threadId).ToArrayAsync().AsTask());
+ await agent.AddChatMessageAsync(thread.Id, functionResultMessage);
+ await Assert.ThrowsAsync(() => agent.InvokeAsync(thread.Id).ToArrayAsync().AsTask());
}
finally
{
- await agent.DeleteThreadAsync(threadId);
- await agent.DeleteAsync();
+ await clientProvider.AssistantClient.DeleteThreadAsync(thread.Id);
+ await clientProvider.AssistantClient.DeleteAssistantAsync(agent.Id);
}
}
@@ -178,48 +169,45 @@ await OpenAIAssistantAgent.CreateAsync(
[RetryFact(typeof(HttpOperationException))]
public async Task AzureOpenAIAssistantAgentAdditionalMessagesAsync()
{
- var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get();
- Assert.NotNull(azureOpenAIConfiguration);
-
- OpenAIAssistantAgent agent =
- await OpenAIAssistantAgent.CreateAsync(
- OpenAIClientProvider.ForAzureOpenAI(new AzureCliCredential(), new Uri(azureOpenAIConfiguration.Endpoint)),
- new(azureOpenAIConfiguration.ChatDeploymentName!),
- new Kernel());
+ AzureOpenAIConfiguration azureOpenAIConfiguration = this.ReadAzureConfiguration();
+ OpenAIClientProvider clientProvider = CreateClientProvider(azureOpenAIConfiguration);
+ Assistant definition = await clientProvider.AssistantClient.CreateAssistantAsync(azureOpenAIConfiguration.ChatDeploymentName!);
+ OpenAIAssistantAgent agent = new(definition, clientProvider.AssistantClient);
- OpenAIThreadCreationOptions threadOptions = new()
+ ThreadCreationOptions threadOptions = new()
{
- Messages = [
- new ChatMessageContent(AuthorRole.User, "Hello"),
- new ChatMessageContent(AuthorRole.Assistant, "How may I help you?"),
- ]
+ InitialMessages =
+ {
+ new ChatMessageContent(AuthorRole.User, "Hello").ToThreadInitializationMessage(),
+ new ChatMessageContent(AuthorRole.Assistant, "How may I help you?").ToThreadInitializationMessage(),
+ }
};
- string threadId = await agent.CreateThreadAsync(threadOptions);
+ AssistantThread thread = await clientProvider.AssistantClient.CreateThreadAsync(threadOptions);
try
{
- var messages = await agent.GetThreadMessagesAsync(threadId).ToArrayAsync();
+ var messages = await agent.GetThreadMessagesAsync(thread.Id).ToArrayAsync();
Assert.Equal(2, messages.Length);
- OpenAIAssistantInvocationOptions invocationOptions = new()
+ RunCreationOptions invocationOptions = new()
{
- AdditionalMessages = [
- new ChatMessageContent(AuthorRole.User, "This is my real question...in three parts:"),
- new ChatMessageContent(AuthorRole.User, "Part 1"),
- new ChatMessageContent(AuthorRole.User, "Part 2"),
- new ChatMessageContent(AuthorRole.User, "Part 3"),
- ]
+ AdditionalMessages = {
+ new ChatMessageContent(AuthorRole.User, "This is my real question...in three parts:").ToThreadInitializationMessage(),
+ new ChatMessageContent(AuthorRole.User, "Part 1").ToThreadInitializationMessage(),
+ new ChatMessageContent(AuthorRole.User, "Part 2").ToThreadInitializationMessage(),
+ new ChatMessageContent(AuthorRole.User, "Part 3").ToThreadInitializationMessage(),
+ }
};
- messages = await agent.InvokeAsync(threadId, invocationOptions).ToArrayAsync();
+ messages = await agent.InvokeAsync(thread.Id, invocationOptions).ToArrayAsync();
Assert.Single(messages);
- messages = await agent.GetThreadMessagesAsync(threadId).ToArrayAsync();
+ messages = await agent.GetThreadMessagesAsync(thread.Id).ToArrayAsync();
Assert.Equal(7, messages.Length);
}
finally
{
- await agent.DeleteThreadAsync(threadId);
- await agent.DeleteAsync();
+ await clientProvider.AssistantClient.DeleteThreadAsync(thread.Id);
+ await clientProvider.AssistantClient.DeleteAssistantAsync(agent.Id);
}
}
@@ -230,23 +218,18 @@ await OpenAIAssistantAgent.CreateAsync(
[Fact]
public async Task AzureOpenAIAssistantAgentStreamingFileSearchAsync()
{
- var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get();
- Assert.NotNull(azureOpenAIConfiguration);
-
- OpenAIClientProvider provider = OpenAIClientProvider.ForAzureOpenAI(new AzureCliCredential(), new Uri(azureOpenAIConfiguration.Endpoint));
- OpenAIAssistantAgent agent =
- await OpenAIAssistantAgent.CreateAsync(
- provider,
- new(azureOpenAIConfiguration.ChatDeploymentName!),
- new Kernel());
+ AzureOpenAIConfiguration azureOpenAIConfiguration = this.ReadAzureConfiguration();
+ OpenAIClientProvider clientProvider = CreateClientProvider(azureOpenAIConfiguration);
+ Assistant definition = await clientProvider.AssistantClient.CreateAssistantAsync(azureOpenAIConfiguration.ChatDeploymentName!);
+ OpenAIAssistantAgent agent = new(definition, clientProvider.AssistantClient);
// Upload file - Using a table of fictional employees.
- OpenAIFileClient fileClient = provider.Client.GetOpenAIFileClient();
+ OpenAIFileClient fileClient = clientProvider.Client.GetOpenAIFileClient();
await using Stream stream = File.OpenRead("TestData/employees.pdf")!;
OpenAIFile fileInfo = await fileClient.UploadFileAsync(stream, "employees.pdf", FileUploadPurpose.Assistants);
// Create a vector-store
- VectorStoreClient vectorStoreClient = provider.Client.GetVectorStoreClient();
+ VectorStoreClient vectorStoreClient = clientProvider.Client.GetVectorStoreClient();
CreateVectorStoreOperation result =
await vectorStoreClient.CreateVectorStoreAsync(waitUntilCompleted: false,
new VectorStoreCreationOptions()
@@ -254,26 +237,26 @@ await vectorStoreClient.CreateVectorStoreAsync(waitUntilCompleted: false,
FileIds = { fileInfo.Id }
});
- string threadId = await agent.CreateThreadAsync();
+ AssistantThread thread = await clientProvider.AssistantClient.CreateThreadAsync();
try
{
- await agent.AddChatMessageAsync(threadId, new(AuthorRole.User, "Who works in sales?"));
+ await agent.AddChatMessageAsync(thread.Id, new(AuthorRole.User, "Who works in sales?"));
ChatHistory messages = [];
- var chunks = await agent.InvokeStreamingAsync(threadId, messages: messages).ToArrayAsync();
+ var chunks = await agent.InvokeStreamingAsync(thread.Id, messages: messages).ToArrayAsync();
Assert.NotEmpty(chunks);
Assert.Single(messages);
}
finally
{
- await agent.DeleteThreadAsync(threadId);
- await agent.DeleteAsync();
+ await clientProvider.AssistantClient.DeleteThreadAsync(thread.Id);
+ await clientProvider.AssistantClient.DeleteAssistantAsync(agent.Id);
await vectorStoreClient.DeleteVectorStoreAsync(result.VectorStoreId);
await fileClient.DeleteFileAsync(fileInfo.Id);
}
}
private async Task ExecuteAgentAsync(
- OpenAIClientProvider config,
+ OpenAIClientProvider clientProvider,
string modelName,
string input,
string expected)
@@ -282,16 +265,8 @@ private async Task ExecuteAgentAsync(
Kernel kernel = new();
KernelPlugin plugin = KernelPluginFactory.CreateFromType();
- kernel.Plugins.Add(plugin);
-
- OpenAIAssistantAgent agent =
- await OpenAIAssistantAgent.CreateAsync(
- config,
- new(modelName)
- {
- Instructions = "Answer questions about the menu.",
- },
- kernel);
+ Assistant definition = await clientProvider.AssistantClient.CreateAssistantAsync(modelName, instructions: "Answer questions about the menu.");
+ OpenAIAssistantAgent agent = new(definition, clientProvider.AssistantClient, [plugin]);
try
{
@@ -314,12 +289,12 @@ await OpenAIAssistantAgent.CreateAsync(
}
finally
{
- await agent.DeleteAsync();
+ await clientProvider.AssistantClient.DeleteAssistantAsync(agent.Id);
}
}
private async Task ExecuteStreamingAgentAsync(
- OpenAIClientProvider config,
+ OpenAIClientProvider clientProvider,
string modelName,
string input,
string expected)
@@ -328,16 +303,8 @@ private async Task ExecuteStreamingAgentAsync(
Kernel kernel = new();
KernelPlugin plugin = KernelPluginFactory.CreateFromType();
- kernel.Plugins.Add(plugin);
-
- OpenAIAssistantAgent agent =
- await OpenAIAssistantAgent.CreateAsync(
- config,
- new(modelName)
- {
- Instructions = "Answer questions about the menu.",
- },
- kernel);
+ Assistant definition = await clientProvider.AssistantClient.CreateAssistantAsync(modelName, instructions: "Answer questions about the menu.");
+ OpenAIAssistantAgent agent = new(definition, clientProvider.AssistantClient, [plugin]);
AgentGroupChat chat = new();
chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input));
@@ -372,6 +339,18 @@ private static void AssertMessageValid(ChatMessageContent message)
Assert.Equal(string.IsNullOrEmpty(message.AuthorName) ? AuthorRole.User : AuthorRole.Assistant, message.Role);
}
+ private AzureOpenAIConfiguration ReadAzureConfiguration()
+ {
+ AzureOpenAIConfiguration? azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get<AzureOpenAIConfiguration>();
+ Assert.NotNull(azureOpenAIConfiguration);
+ return azureOpenAIConfiguration;
+ }
+
+ private static OpenAIClientProvider CreateClientProvider(AzureOpenAIConfiguration azureOpenAIConfiguration)
+ {
+ return OpenAIClientProvider.ForAzureOpenAI(new AzureCliCredential(), new Uri(azureOpenAIConfiguration.Endpoint));
+ }
+
public sealed class MenuPlugin
{
[KernelFunction, Description("Provides a list of specials from the menu.")]
diff --git a/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAgentsTest.cs b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAgentsTest.cs
index 1792192d6b90..7c9ee6a3c654 100644
--- a/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAgentsTest.cs
+++ b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAgentsTest.cs
@@ -1,13 +1,10 @@
// Copyright (c) Microsoft. All rights reserved.
-using System.ClientModel;
using System.Collections.ObjectModel;
using System.Diagnostics;
using Azure.AI.Projects;
-using Azure.Identity;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents;
-using Microsoft.SemanticKernel.Agents.AzureAI;
using Microsoft.SemanticKernel.Agents.OpenAI;
using Microsoft.SemanticKernel.ChatCompletion;
using OpenAI.Assistants;
@@ -16,52 +13,41 @@
using ChatTokenUsage = OpenAI.Chat.ChatTokenUsage;
///
-/// Base class for samples that demonstrate the usage of agents.
+/// Base class for samples that demonstrate the usage of host agents
+/// based on API's such as Open AI Assistants or Azure AI Agents.
///
-public abstract class BaseAgentsTest(ITestOutputHelper output) : BaseTest(output, redirectSystemConsoleOutput: true)
+public abstract class BaseAgentsTest<TClient>(ITestOutputHelper output) : BaseAgentsTest(output)
{
///
/// Metadata key to indicate the assistant as created for a sample.
///
- protected const string AssistantSampleMetadataKey = "sksample";
-
- protected override bool ForceOpenAI => true;
+ protected const string SampleMetadataKey = "sksample";
///
- /// Metadata to indicate the assistant as created for a sample.
+ /// Metadata to indicate the object was created for a sample.
///
///
- /// While the samples do attempt delete the assistants it creates, it is possible
- /// that some assistants may remain. This metadata can be used to identify and sample
- /// agents for clean-up.
+ /// While the samples do attempt delete the objects it creates, it is possible
+ /// that some may remain. This metadata can be used to identify and sample
+ /// objects for manual clean-up.
///
- protected static readonly ReadOnlyDictionary AssistantSampleMetadata =
+ protected static readonly ReadOnlyDictionary<string, string> SampleMetadata =
new(new Dictionary
{
- { AssistantSampleMetadataKey, bool.TrueString }
+ { SampleMetadataKey, bool.TrueString }
});
///
- /// Provide a according to the configuration settings.
- ///
- protected AzureAIClientProvider GetAzureProvider()
- {
- return AzureAIClientProvider.FromConnectionString(TestConfiguration.AzureAI.ConnectionString, new AzureCliCredential());
- }
-
- ///
- /// Provide a according to the configuration settings.
+ /// Gets the root client for the service.
///
- protected OpenAIClientProvider GetClientProvider()
- {
- return
- this.UseOpenAIConfig ?
- OpenAIClientProvider.ForOpenAI(new ApiKeyCredential(this.ApiKey ?? throw new ConfigurationNotFoundException("OpenAI:ApiKey"))) :
- !string.IsNullOrWhiteSpace(this.ApiKey) ?
- OpenAIClientProvider.ForAzureOpenAI(new ApiKeyCredential(this.ApiKey), new Uri(this.Endpoint!)) :
- OpenAIClientProvider.ForAzureOpenAI(new AzureCliCredential(), new Uri(this.Endpoint!));
- }
+ protected abstract TClient Client { get; }
+}
+///
+/// Base class for samples that demonstrate the usage of agents.
+///
+public abstract class BaseAgentsTest(ITestOutputHelper output) : BaseTest(output, redirectSystemConsoleOutput: true)
+{
///
/// Common method to write formatted agent chat content to the console.
///
diff --git a/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAssistantTest.cs b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAssistantTest.cs
new file mode 100644
index 000000000000..b9dd380c5058
--- /dev/null
+++ b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAssistantTest.cs
@@ -0,0 +1,91 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.ClientModel;
+using System.Diagnostics;
+using Azure.Identity;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents;
+using Microsoft.SemanticKernel.Agents.OpenAI;
+using OpenAI;
+using OpenAI.Assistants;
+using OpenAI.Files;
+
+///
+/// Base class for samples that demonstrate the usage of <see cref="OpenAIAssistantAgent"/>.
+///
+public abstract class BaseAssistantTest : BaseAgentsTest
+{
+ protected BaseAssistantTest(ITestOutputHelper output) : base(output)
+ {
+ var clientProvider =
+ this.UseOpenAIConfig ?
+ OpenAIClientProvider.ForOpenAI(new ApiKeyCredential(this.ApiKey ?? throw new ConfigurationNotFoundException("OpenAI:ApiKey"))) :
+ !string.IsNullOrWhiteSpace(this.ApiKey) ?
+ OpenAIClientProvider.ForAzureOpenAI(new ApiKeyCredential(this.ApiKey), new Uri(this.Endpoint!)) :
+ OpenAIClientProvider.ForAzureOpenAI(new AzureCliCredential(), new Uri(this.Endpoint!));
+
+ this.Client = clientProvider.Client;
+ this.AssistantClient = clientProvider.AssistantClient;
+ }
+
+ ///
+ protected override OpenAIClient Client { get; }
+
+ ///
+ /// Gets the <see cref="AssistantClient"/>.
+ ///
+ protected AssistantClient AssistantClient { get; }
+
+ protected async Task DownloadResponseContentAsync(ChatMessageContent message)
+ {
+ OpenAIFileClient fileClient = this.Client.GetOpenAIFileClient();
+
+ foreach (KernelContent item in message.Items)
+ {
+ if (item is AnnotationContent annotation)
+ {
+ await this.DownloadFileContentAsync(fileClient, annotation.FileId!);
+ }
+ }
+ }
+
+ protected async Task DownloadResponseImageAsync(ChatMessageContent message)
+ {
+ OpenAIFileClient fileClient = this.Client.GetOpenAIFileClient();
+
+ foreach (KernelContent item in message.Items)
+ {
+ if (item is FileReferenceContent fileReference)
+ {
+ await this.DownloadFileContentAsync(fileClient, fileReference.FileId, launchViewer: true);
+ }
+ }
+ }
+
+ private async Task DownloadFileContentAsync(OpenAIFileClient fileClient, string fileId, bool launchViewer = false)
+ {
+ OpenAIFile fileInfo = fileClient.GetFile(fileId);
+ if (fileInfo.Purpose == FilePurpose.AssistantsOutput)
+ {
+ string filePath = Path.Combine(Path.GetTempPath(), Path.GetFileName(fileInfo.Filename));
+ if (launchViewer)
+ {
+ filePath = Path.ChangeExtension(filePath, ".png");
+ }
+
+ BinaryData content = await fileClient.DownloadFileAsync(fileId);
+ File.WriteAllBytes(filePath, content.ToArray());
+ Console.WriteLine($" File #{fileId} saved to: {filePath}");
+
+ if (launchViewer)
+ {
+ Process.Start(
+ new ProcessStartInfo
+ {
+ FileName = "cmd.exe",
+ Arguments = $"/C start {filePath}"
+ });
+ }
+ }
+ }
+}
diff --git a/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAzureAgentTest.cs b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAzureAgentTest.cs
new file mode 100644
index 000000000000..a36932db1f38
--- /dev/null
+++ b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAzureAgentTest.cs
@@ -0,0 +1,173 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.ClientModel;
+using System.Collections.ObjectModel;
+using System.Diagnostics;
+using Azure.AI.Projects;
+using Azure.Identity;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents;
+using Microsoft.SemanticKernel.Agents.AzureAI;
+using Microsoft.SemanticKernel.Agents.OpenAI;
+using Microsoft.SemanticKernel.ChatCompletion;
+using OpenAI.Assistants;
+using OpenAI.Files;
+
+using ChatTokenUsage = OpenAI.Chat.ChatTokenUsage;
+
+///
+/// Base class for samples that demonstrate the usage of agents.
+///
+public abstract class BaseAzureTest(ITestOutputHelper output) : BaseTest(output, redirectSystemConsoleOutput: true)
+{
+ ///
+ /// Metadata key to indicate the object was created for a sample.
+ ///
+ protected const string AssistantSampleMetadataKey = "sksample";
+
+ protected override bool ForceOpenAI => false;
+
+ ///
+ /// Metadata to indicate the object was created for a sample.
+ ///
+ ///
+ /// While the samples do attempt to delete the objects they create, it is possible
+ /// that some may remain. This metadata can be used to identify sample
+ /// objects for manual clean-up.
+ ///
+ protected static readonly ReadOnlyDictionary SampleMetadata =
+ new(new Dictionary
+ {
+ { AssistantSampleMetadataKey, bool.TrueString }
+ });
+
+ ///
+ /// Provide an <see cref="AzureAIClientProvider"/> according to the configuration settings.
+ ///
+ protected AzureAIClientProvider GetAzureProvider()
+ {
+ return AzureAIClientProvider.FromConnectionString(TestConfiguration.AzureAI.ConnectionString, new AzureCliCredential());
+ }
+
+ ///
+ /// Provide an <see cref="OpenAIClientProvider"/> according to the configuration settings.
+ ///
+ protected OpenAIClientProvider GetClientProvider()
+ {
+ return
+ this.UseOpenAIConfig ?
+ OpenAIClientProvider.ForOpenAI(new ApiKeyCredential(this.ApiKey ?? throw new ConfigurationNotFoundException("OpenAI:ApiKey"))) :
+ !string.IsNullOrWhiteSpace(this.ApiKey) ?
+ OpenAIClientProvider.ForAzureOpenAI(new ApiKeyCredential(this.ApiKey), new Uri(this.Endpoint!)) :
+ OpenAIClientProvider.ForAzureOpenAI(new AzureCliCredential(), new Uri(this.Endpoint!));
+ }
+
+ ///
+ /// Common method to write formatted agent chat content to the console.
+ ///
+ protected void WriteAgentChatMessage(ChatMessageContent message)
+ {
+ // Include ChatMessageContent.AuthorName in output, if present.
+ string authorExpression = message.Role == AuthorRole.User ? string.Empty : $" - {message.AuthorName ?? "*"}";
+ // Include TextContent (via ChatMessageContent.Content), if present.
+ string contentExpression = string.IsNullOrWhiteSpace(message.Content) ? string.Empty : message.Content;
+ bool isCode = message.Metadata?.ContainsKey(OpenAIAssistantAgent.CodeInterpreterMetadataKey) ?? false;
+ string codeMarker = isCode ? "\n [CODE]\n" : " ";
+ Console.WriteLine($"\n# {message.Role}{authorExpression}:{codeMarker}{contentExpression}");
+
+ // Provide visibility for inner content (that isn't TextContent).
+ foreach (KernelContent item in message.Items)
+ {
+ if (item is AnnotationContent annotation)
+ {
+ Console.WriteLine($" [{item.GetType().Name}] {annotation.Quote}: File #{annotation.FileId}");
+ }
+ else if (item is FileReferenceContent fileReference)
+ {
+ Console.WriteLine($" [{item.GetType().Name}] File #{fileReference.FileId}");
+ }
+ else if (item is ImageContent image)
+ {
+ Console.WriteLine($" [{item.GetType().Name}] {image.Uri?.ToString() ?? image.DataUri ?? $"{image.Data?.Length} bytes"}");
+ }
+ else if (item is FunctionCallContent functionCall)
+ {
+ Console.WriteLine($" [{item.GetType().Name}] {functionCall.Id}");
+ }
+ else if (item is FunctionResultContent functionResult)
+ {
+ Console.WriteLine($" [{item.GetType().Name}] {functionResult.CallId} - {functionResult.Result?.AsJson() ?? "*"}");
+ }
+ }
+
+ if (message.Metadata?.TryGetValue("Usage", out object? usage) ?? false)
+ {
+ if (usage is RunStepTokenUsage assistantUsage)
+ {
+ WriteUsage(assistantUsage.TotalTokenCount, assistantUsage.InputTokenCount, assistantUsage.OutputTokenCount);
+ }
+ else if (usage is RunStepCompletionUsage agentUsage)
+ {
+ WriteUsage(agentUsage.TotalTokens, agentUsage.PromptTokens, agentUsage.CompletionTokens);
+ }
+ else if (usage is ChatTokenUsage chatUsage)
+ {
+ WriteUsage(chatUsage.TotalTokenCount, chatUsage.InputTokenCount, chatUsage.OutputTokenCount);
+ }
+ }
+
+ void WriteUsage(long totalTokens, long inputTokens, long outputTokens)
+ {
+ Console.WriteLine($" [Usage] Tokens: {totalTokens}, Input: {inputTokens}, Output: {outputTokens}");
+ }
+ }
+
+ protected async Task DownloadResponseContentAsync(OpenAIFileClient client, ChatMessageContent message)
+ {
+ foreach (KernelContent item in message.Items)
+ {
+ if (item is AnnotationContent annotation)
+ {
+ await this.DownloadFileContentAsync(client, annotation.FileId!);
+ }
+ }
+ }
+
+ protected async Task DownloadResponseImageAsync(OpenAIFileClient client, ChatMessageContent message)
+ {
+ foreach (KernelContent item in message.Items)
+ {
+ if (item is FileReferenceContent fileReference)
+ {
+ await this.DownloadFileContentAsync(client, fileReference.FileId, launchViewer: true);
+ }
+ }
+ }
+
+ private async Task DownloadFileContentAsync(OpenAIFileClient client, string fileId, bool launchViewer = false)
+ {
+ OpenAIFile fileInfo = client.GetFile(fileId);
+ if (fileInfo.Purpose == FilePurpose.AssistantsOutput)
+ {
+ string filePath = Path.Combine(Path.GetTempPath(), Path.GetFileName(fileInfo.Filename));
+ if (launchViewer)
+ {
+ filePath = Path.ChangeExtension(filePath, ".png");
+ }
+
+ BinaryData content = await client.DownloadFileAsync(fileId);
+ File.WriteAllBytes(filePath, content.ToArray());
+ Console.WriteLine($" File #{fileId} saved to: {filePath}");
+
+ if (launchViewer)
+ {
+ Process.Start(
+ new ProcessStartInfo
+ {
+ FileName = "cmd.exe",
+ Arguments = $"/C start {filePath}"
+ });
+ }
+ }
+ }
+}
diff --git a/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAzureTest.cs b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAzureTest.cs
new file mode 100644
index 000000000000..32bf490a8230
--- /dev/null
+++ b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAzureTest.cs
@@ -0,0 +1,68 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Diagnostics;
+using Azure.AI.Projects;
+using Azure.Identity;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents;
+using Microsoft.SemanticKernel.Agents.AzureAI;
+
+///
+/// Base class for samples that demonstrate the usage of <see cref="AzureAIAgent"/>.
+///
+public abstract class BaseAzureAgentTest : BaseAgentsTest
+{
+ protected BaseAzureAgentTest(ITestOutputHelper output) : base(output)
+ {
+ var clientProvider = AzureAIClientProvider.FromConnectionString(TestConfiguration.AzureAI.ConnectionString, new AzureCliCredential());
+
+ this.Client = clientProvider.Client;
+ this.AgentsClient = clientProvider.AgentsClient;
+ }
+
+ ///
+ protected override AIProjectClient Client { get; }
+
+ ///
+ /// Gets the <see cref="AgentsClient"/>.
+ ///
+ protected AgentsClient AgentsClient { get; }
+
+ protected async Task DownloadContentAsync(ChatMessageContent message)
+ {
+ foreach (KernelContent item in message.Items)
+ {
+ if (item is AnnotationContent annotation)
+ {
+ await this.DownloadFileAsync(annotation.FileId!);
+ }
+ }
+ }
+
+ protected async Task DownloadFileAsync(string fileId, bool launchViewer = false)
+ {
+ AgentFile fileInfo = this.AgentsClient.GetFile(fileId);
+ if (fileInfo.Purpose == AgentFilePurpose.AgentsOutput)
+ {
+ string filePath = Path.Combine(Path.GetTempPath(), Path.GetFileName(fileInfo.Filename));
+ if (launchViewer)
+ {
+ filePath = Path.ChangeExtension(filePath, ".png");
+ }
+
+ BinaryData content = await this.AgentsClient.GetFileContentAsync(fileId);
+ File.WriteAllBytes(filePath, content.ToArray());
+ Console.WriteLine($" File #{fileId} saved to: {filePath}");
+
+ if (launchViewer)
+ {
+ Process.Start(
+ new ProcessStartInfo
+ {
+ FileName = "cmd.exe",
+ Arguments = $"/C start {filePath}"
+ });
+ }
+ }
+ }
+}