diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Extensions/AzureOpenAIServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Extensions/AzureOpenAIServiceCollectionExtensionsTests.cs
new file mode 100644
index 000000000000..041cee3f3cc9
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Extensions/AzureOpenAIServiceCollectionExtensionsTests.cs
@@ -0,0 +1,63 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using Azure.AI.OpenAI;
+using Azure.Core;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+using Microsoft.SemanticKernel.TextGeneration;
+
+namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Extensions;
+
+ /// <summary>
+ /// Unit tests for <see cref="AzureOpenAIServiceCollectionExtensions"/> class.
+ /// </summary>
+public sealed class AzureOpenAIServiceCollectionExtensionsTests
+{
+ #region Chat completion
+
+ [Theory]
+ [InlineData(InitializationType.ApiKey)]
+ [InlineData(InitializationType.TokenCredential)]
+ [InlineData(InitializationType.OpenAIClientInline)]
+ [InlineData(InitializationType.OpenAIClientInServiceProvider)]
+ public void ServiceCollectionAddAzureOpenAIChatCompletionAddsValidService(InitializationType type)
+ {
+ // Arrange
+ var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken());
+ var client = new AzureOpenAIClient(new Uri("http://localhost"), "key");
+ var builder = Kernel.CreateBuilder();
+
+ builder.Services.AddSingleton(client);
+
+ // Act
+ IServiceCollection collection = type switch
+ {
+ InitializationType.ApiKey => builder.Services.AddAzureOpenAIChatCompletion("deployment-name", "https://endpoint", "api-key"),
+ InitializationType.TokenCredential => builder.Services.AddAzureOpenAIChatCompletion("deployment-name", "https://endpoint", credentials),
+ InitializationType.OpenAIClientInline => builder.Services.AddAzureOpenAIChatCompletion("deployment-name", client),
+ InitializationType.OpenAIClientInServiceProvider => builder.Services.AddAzureOpenAIChatCompletion("deployment-name"),
+ _ => builder.Services
+ };
+
+ // Assert
+ var chatCompletionService = builder.Build().GetRequiredService<IChatCompletionService>();
+ Assert.True(chatCompletionService is AzureOpenAIChatCompletionService);
+
+ var textGenerationService = builder.Build().GetRequiredService<ITextGenerationService>();
+ Assert.True(textGenerationService is AzureOpenAIChatCompletionService);
+ }
+
+ #endregion
+
+ public enum InitializationType
+ {
+ ApiKey,
+ TokenCredential,
+ OpenAIClientInline,
+ OpenAIClientInServiceProvider,
+ OpenAIClientEndpoint,
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Extensions/AzureOpenAIServiceKernelBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Extensions/AzureOpenAIServiceKernelBuilderExtensionsTests.cs
new file mode 100644
index 000000000000..6025eb1d447f
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Extensions/AzureOpenAIServiceKernelBuilderExtensionsTests.cs
@@ -0,0 +1,63 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using Azure.AI.OpenAI;
+using Azure.Core;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+using Microsoft.SemanticKernel.TextGeneration;
+
+namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Extensions;
+
+ /// <summary>
+ /// Unit tests for <see cref="AzureOpenAIServiceCollectionExtensions"/> class.
+ /// </summary>
+public sealed class AzureOpenAIServiceKernelBuilderExtensionsTests
+{
+ #region Chat completion
+
+ [Theory]
+ [InlineData(InitializationType.ApiKey)]
+ [InlineData(InitializationType.TokenCredential)]
+ [InlineData(InitializationType.OpenAIClientInline)]
+ [InlineData(InitializationType.OpenAIClientInServiceProvider)]
+ public void KernelBuilderAddAzureOpenAIChatCompletionAddsValidService(InitializationType type)
+ {
+ // Arrange
+ var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken());
+ var client = new AzureOpenAIClient(new Uri("http://localhost"), "key");
+ var builder = Kernel.CreateBuilder();
+
+ builder.Services.AddSingleton(client);
+
+ // Act
+ builder = type switch
+ {
+ InitializationType.ApiKey => builder.AddAzureOpenAIChatCompletion("deployment-name", "https://endpoint", "api-key"),
+ InitializationType.TokenCredential => builder.AddAzureOpenAIChatCompletion("deployment-name", "https://endpoint", credentials),
+ InitializationType.OpenAIClientInline => builder.AddAzureOpenAIChatCompletion("deployment-name", client),
+ InitializationType.OpenAIClientInServiceProvider => builder.AddAzureOpenAIChatCompletion("deployment-name"),
+ _ => builder
+ };
+
+ // Assert
+ var chatCompletionService = builder.Build().GetRequiredService<IChatCompletionService>();
+ Assert.True(chatCompletionService is AzureOpenAIChatCompletionService);
+
+ var textGenerationService = builder.Build().GetRequiredService<ITextGenerationService>();
+ Assert.True(textGenerationService is AzureOpenAIChatCompletionService);
+ }
+
+ #endregion
+
+ public enum InitializationType
+ {
+ ApiKey,
+ TokenCredential,
+ OpenAIClientInline,
+ OpenAIClientInServiceProvider,
+ OpenAIClientEndpoint,
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/ChatHistoryExtensionsTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Extensions/ChatHistoryExtensionsTests.cs
similarity index 95%
rename from dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/ChatHistoryExtensionsTests.cs
rename to dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Extensions/ChatHistoryExtensionsTests.cs
index a0579f6d6c72..94fc1e5d1a5c 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/ChatHistoryExtensionsTests.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Extensions/ChatHistoryExtensionsTests.cs
@@ -7,7 +7,7 @@
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
-namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests;
+namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Extensions;
public class ChatHistoryExtensionsTests
{
[Fact]
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/ClientCore.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/ClientCore.cs
index 6486d7348144..4152f2137409 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/ClientCore.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/ClientCore.cs
@@ -126,7 +126,7 @@ internal ClientCore(ILogger? logger = null)
private static Dictionary<string, object?> GetChatChoiceMetadata(OpenAIChatCompletion completions)
{
#pragma warning disable AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
- return new Dictionary<string, object?>(12)
+ return new Dictionary<string, object?>(8)
{
{ nameof(completions.Id), completions.Id },
{ nameof(completions.CreatedAt), completions.CreatedAt },
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Extensions/AzureOpenAIServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Extensions/AzureOpenAIServiceCollectionExtensions.cs
new file mode 100644
index 000000000000..782889c4542c
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Extensions/AzureOpenAIServiceCollectionExtensions.cs
@@ -0,0 +1,249 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Net.Http;
+using Azure;
+using Azure.AI.OpenAI;
+using Azure.Core;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.Logging;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+using Microsoft.SemanticKernel.Http;
+using Microsoft.SemanticKernel.TextGeneration;
+
+#pragma warning disable IDE0039 // Use local function
+
+namespace Microsoft.SemanticKernel;
+
+ /// <summary>
+ /// Provides extension methods for <see cref="IServiceCollection"/> and related classes to configure Azure OpenAI connectors.
+ /// </summary>
+public static class AzureOpenAIServiceCollectionExtensions
+{
+ #region Chat Completion
+
+ /// <summary>
+ /// Adds the Azure OpenAI chat completion service to the list.
+ /// </summary>
+ /// <param name="builder">The <see cref="IKernelBuilder"/> instance to augment.</param>
+ /// <param name="deploymentName">Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource</param>
+ /// <param name="endpoint">Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart</param>
+ /// <param name="apiKey">Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart</param>
+ /// <param name="serviceId">A local identifier for the given AI service</param>
+ /// <param name="modelId">Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart</param>
+ /// <param name="httpClient">The HttpClient to use with this service.</param>
+ /// <returns>The same instance as <paramref name="builder"/>.</returns>
+ public static IKernelBuilder AddAzureOpenAIChatCompletion(
+ this IKernelBuilder builder,
+ string deploymentName,
+ string endpoint,
+ string apiKey,
+ string? serviceId = null,
+ string? modelId = null,
+ HttpClient? httpClient = null)
+ {
+ Verify.NotNull(builder);
+ Verify.NotNullOrWhiteSpace(deploymentName);
+ Verify.NotNullOrWhiteSpace(endpoint);
+ Verify.NotNullOrWhiteSpace(apiKey);
+
+ Func<IServiceProvider, object?, AzureOpenAIChatCompletionService> factory = (serviceProvider, _) =>
+ {
+ AzureOpenAIClient client = CreateAzureOpenAIClient(
+ endpoint,
+ new AzureKeyCredential(apiKey),
+ HttpClientProvider.GetHttpClient(httpClient, serviceProvider));
+
+ return new(deploymentName, client, modelId, serviceProvider.GetService<ILoggerFactory>());
+ };
+
+ builder.Services.AddKeyedSingleton<IChatCompletionService>(serviceId, factory);
+ builder.Services.AddKeyedSingleton<ITextGenerationService>(serviceId, factory);
+
+ return builder;
+ }
+
+ ///
+ /// Adds the Azure OpenAI chat completion service to the list.
+ ///
+ /// The instance to augment.
+ /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource
+ /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart
+ /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart
+ /// A local identifier for the given AI service
+ /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart
+ /// The same instance as .
+ public static IServiceCollection AddAzureOpenAIChatCompletion(
+ this IServiceCollection services,
+ string deploymentName,
+ string endpoint,
+ string apiKey,
+ string? serviceId = null,
+ string? modelId = null)
+ {
+ Verify.NotNull(services);
+ Verify.NotNullOrWhiteSpace(deploymentName);
+ Verify.NotNullOrWhiteSpace(endpoint);
+ Verify.NotNullOrWhiteSpace(apiKey);
+
+ Func<IServiceProvider, object?, AzureOpenAIChatCompletionService> factory = (serviceProvider, _) =>
+ {
+ AzureOpenAIClient client = CreateAzureOpenAIClient(
+ endpoint,
+ new AzureKeyCredential(apiKey),
+ HttpClientProvider.GetHttpClient(serviceProvider));
+
+ return new(deploymentName, client, modelId, serviceProvider.GetService<ILoggerFactory>());
+ };
+
+ services.AddKeyedSingleton<IChatCompletionService>(serviceId, factory);
+ services.AddKeyedSingleton<ITextGenerationService>(serviceId, factory);
+
+ return services;
+ }
+
+ ///
+ /// Adds the Azure OpenAI chat completion service to the list.
+ ///
+ /// The instance to augment.
+ /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource
+ /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart
+ /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc.
+ /// A local identifier for the given AI service
+ /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart
+ /// The HttpClient to use with this service.
+ /// The same instance as .
+ public static IKernelBuilder AddAzureOpenAIChatCompletion(
+ this IKernelBuilder builder,
+ string deploymentName,
+ string endpoint,
+ TokenCredential credentials,
+ string? serviceId = null,
+ string? modelId = null,
+ HttpClient? httpClient = null)
+ {
+ Verify.NotNull(builder);
+ Verify.NotNullOrWhiteSpace(deploymentName);
+ Verify.NotNullOrWhiteSpace(endpoint);
+ Verify.NotNull(credentials);
+
+ Func<IServiceProvider, object?, AzureOpenAIChatCompletionService> factory = (serviceProvider, _) =>
+ {
+ AzureOpenAIClient client = CreateAzureOpenAIClient(
+ endpoint,
+ credentials,
+ HttpClientProvider.GetHttpClient(httpClient, serviceProvider));
+
+ return new(deploymentName, client, modelId, serviceProvider.GetService<ILoggerFactory>());
+ };
+
+ builder.Services.AddKeyedSingleton<IChatCompletionService>(serviceId, factory);
+ builder.Services.AddKeyedSingleton<ITextGenerationService>(serviceId, factory);
+
+ return builder;
+ }
+
+ ///
+ /// Adds the Azure OpenAI chat completion service to the list.
+ ///
+ /// The instance to augment.
+ /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource
+ /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart
+ /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc.
+ /// A local identifier for the given AI service
+ /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart
+ /// The same instance as .
+ public static IServiceCollection AddAzureOpenAIChatCompletion(
+ this IServiceCollection services,
+ string deploymentName,
+ string endpoint,
+ TokenCredential credentials,
+ string? serviceId = null,
+ string? modelId = null)
+ {
+ Verify.NotNull(services);
+ Verify.NotNullOrWhiteSpace(deploymentName);
+ Verify.NotNullOrWhiteSpace(endpoint);
+ Verify.NotNull(credentials);
+
+ Func<IServiceProvider, object?, AzureOpenAIChatCompletionService> factory = (serviceProvider, _) =>
+ {
+ AzureOpenAIClient client = CreateAzureOpenAIClient(
+ endpoint,
+ credentials,
+ HttpClientProvider.GetHttpClient(serviceProvider));
+
+ return new(deploymentName, client, modelId, serviceProvider.GetService<ILoggerFactory>());
+ };
+
+ services.AddKeyedSingleton<IChatCompletionService>(serviceId, factory);
+ services.AddKeyedSingleton<ITextGenerationService>(serviceId, factory);
+
+ return services;
+ }
+
+ ///
+ /// Adds the Azure OpenAI chat completion service to the list.
+ ///
+ /// The instance to augment.
+ /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource
+ /// to use for the service. If null, one must be available in the service provider when this service is resolved.
+ /// A local identifier for the given AI service
+ /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart
+ /// The same instance as .
+ public static IKernelBuilder AddAzureOpenAIChatCompletion(
+ this IKernelBuilder builder,
+ string deploymentName,
+ AzureOpenAIClient? azureOpenAIClient = null,
+ string? serviceId = null,
+ string? modelId = null)
+ {
+ Verify.NotNull(builder);
+ Verify.NotNullOrWhiteSpace(deploymentName);
+
+ Func<IServiceProvider, object?, AzureOpenAIChatCompletionService> factory = (serviceProvider, _) =>
+ new(deploymentName, azureOpenAIClient ?? serviceProvider.GetRequiredService<AzureOpenAIClient>(), modelId, serviceProvider.GetService<ILoggerFactory>());
+
+ builder.Services.AddKeyedSingleton<IChatCompletionService>(serviceId, factory);
+ builder.Services.AddKeyedSingleton<ITextGenerationService>(serviceId, factory);
+
+ return builder;
+ }
+
+ ///
+ /// Adds the Azure OpenAI chat completion service to the list.
+ ///
+ /// The instance to augment.
+ /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource
+ /// to use for the service. If null, one must be available in the service provider when this service is resolved.
+ /// A local identifier for the given AI service
+ /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart
+ /// The same instance as .
+ public static IServiceCollection AddAzureOpenAIChatCompletion(
+ this IServiceCollection services,
+ string deploymentName,
+ AzureOpenAIClient? azureOpenAIClient = null,
+ string? serviceId = null,
+ string? modelId = null)
+ {
+ Verify.NotNull(services);
+ Verify.NotNullOrWhiteSpace(deploymentName);
+
+ Func<IServiceProvider, object?, AzureOpenAIChatCompletionService> factory = (serviceProvider, _) =>
+ new(deploymentName, azureOpenAIClient ?? serviceProvider.GetRequiredService<AzureOpenAIClient>(), modelId, serviceProvider.GetService<ILoggerFactory>());
+
+ services.AddKeyedSingleton<IChatCompletionService>(serviceId, factory);
+ services.AddKeyedSingleton<ITextGenerationService>(serviceId, factory);
+
+ return services;
+ }
+
+ #endregion
+
+ private static AzureOpenAIClient CreateAzureOpenAIClient(string endpoint, AzureKeyCredential credentials, HttpClient? httpClient) =>
+ new(new Uri(endpoint), credentials, ClientCore.GetOpenAIClientOptions(httpClient));
+
+ private static AzureOpenAIClient CreateAzureOpenAIClient(string endpoint, TokenCredential credentials, HttpClient? httpClient) =>
+ new(new Uri(endpoint), credentials, ClientCore.GetOpenAIClientOptions(httpClient));
+}
diff --git a/dotnet/src/IntegrationTestsV2/Connectors/AzureOpenAI/AzureOpenAIChatCompletionTests.cs b/dotnet/src/IntegrationTestsV2/Connectors/AzureOpenAI/AzureOpenAIChatCompletionTests.cs
new file mode 100644
index 000000000000..04f1be7e45c7
--- /dev/null
+++ b/dotnet/src/IntegrationTestsV2/Connectors/AzureOpenAI/AzureOpenAIChatCompletionTests.cs
@@ -0,0 +1,273 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Collections.Generic;
+using System.Net;
+using System.Net.Http;
+using System.Text;
+using System.Text.Json;
+using System.Threading;
+using System.Threading.Tasks;
+using Microsoft.Extensions.Configuration;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.Http.Resilience;
+using Microsoft.Extensions.Logging;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+using OpenAI.Chat;
+using SemanticKernel.IntegrationTests.TestSettings;
+using Xunit;
+
+namespace SemanticKernel.IntegrationTestsV2.Connectors.AzureOpenAI;
+
+#pragma warning disable xUnit1004 // Contains test methods used in manual verification. Disable warning for this file only.
+
+public sealed class AzureOpenAIChatCompletionTests
+{
+ [Fact]
+ //[Fact(Skip = "Skipping while we investigate issue with GitHub actions.")]
+ public async Task ItCanUseAzureOpenAiChatForTextGenerationAsync()
+ {
+ // Arrange
+ var kernel = this.CreateAndInitializeKernel();
+
+ var func = kernel.CreateFunctionFromPrompt(
+ "List the two planets after '{{$input}}', excluding moons, using bullet points.",
+ new AzureOpenAIPromptExecutionSettings());
+
+ // Act
+ var result = await func.InvokeAsync(kernel, new() { [InputParameterName] = "Jupiter" });
+
+ // Assert
+ Assert.NotNull(result);
+ Assert.Contains("Saturn", result.GetValue<string>(), StringComparison.InvariantCultureIgnoreCase);
+ Assert.Contains("Uranus", result.GetValue<string>(), StringComparison.InvariantCultureIgnoreCase);
+ }
+
+ [Fact]
+ public async Task AzureOpenAIStreamingTestAsync()
+ {
+ // Arrange
+ var kernel = this.CreateAndInitializeKernel();
+
+ var plugins = TestHelpers.ImportSamplePlugins(kernel, "ChatPlugin");
+
+ StringBuilder fullResult = new();
+
+ var prompt = "Where is the most famous fish market in Seattle, Washington, USA?";
+
+ // Act
+ await foreach (var content in kernel.InvokeStreamingAsync(plugins["ChatPlugin"]["Chat"], new() { [InputParameterName] = prompt }))
+ {
+ fullResult.Append(content);
+ }
+
+ // Assert
+ Assert.Contains("Pike Place", fullResult.ToString(), StringComparison.OrdinalIgnoreCase);
+ }
+
+ [Fact]
+ public async Task AzureOpenAIHttpRetryPolicyTestAsync()
+ {
+ // Arrange
+ List<HttpStatusCode?> statusCodes = [];
+
+ var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get<AzureOpenAIConfiguration>();
+
+ this._kernelBuilder.AddAzureOpenAIChatCompletion(
+ deploymentName: azureOpenAIConfiguration!.ChatDeploymentName!,
+ modelId: azureOpenAIConfiguration.ChatModelId,
+ endpoint: azureOpenAIConfiguration.Endpoint,
+ apiKey: "INVALID_KEY");
+
+ this._kernelBuilder.Services.ConfigureHttpClientDefaults(c =>
+ {
+ // Use a standard resiliency policy, augmented to retry on 401 Unauthorized for this example
+ c.AddStandardResilienceHandler().Configure(o =>
+ {
+ o.Retry.ShouldHandle = args => ValueTask.FromResult(args.Outcome.Result?.StatusCode is HttpStatusCode.Unauthorized);
+ o.Retry.OnRetry = args =>
+ {
+ statusCodes.Add(args.Outcome.Result?.StatusCode);
+ return ValueTask.CompletedTask;
+ };
+ });
+ });
+
+ var target = this._kernelBuilder.Build();
+
+ var plugins = TestHelpers.ImportSamplePlugins(target, "SummarizePlugin");
+
+ var prompt = "Where is the most famous fish market in Seattle, Washington, USA?";
+
+ // Act
+ var exception = await Assert.ThrowsAsync<HttpOperationException>(() => target.InvokeAsync(plugins["SummarizePlugin"]["Summarize"], new() { [InputParameterName] = prompt }));
+
+ // Assert
+ Assert.All(statusCodes, s => Assert.Equal(HttpStatusCode.Unauthorized, s));
+ Assert.Equal(HttpStatusCode.Unauthorized, ((HttpOperationException)exception).StatusCode);
+ }
+
+ [Fact]
+ public async Task AzureOpenAIShouldReturnMetadataAsync()
+ {
+ // Arrange
+ var kernel = this.CreateAndInitializeKernel();
+
+ var plugins = TestHelpers.ImportSamplePlugins(kernel, "FunPlugin");
+
+ // Act
+ var result = await kernel.InvokeAsync(plugins["FunPlugin"]["Limerick"]);
+
+ // Assert
+ Assert.NotNull(result.Metadata);
+
+ // Usage
+ Assert.True(result.Metadata.TryGetValue("Usage", out object? usageObject));
+ Assert.NotNull(usageObject);
+
+ var jsonObject = JsonSerializer.SerializeToElement(usageObject);
+ Assert.True(jsonObject.TryGetProperty("InputTokens", out JsonElement promptTokensJson));
+ Assert.True(promptTokensJson.TryGetInt32(out int promptTokens));
+ Assert.NotEqual(0, promptTokens);
+
+ Assert.True(jsonObject.TryGetProperty("OutputTokens", out JsonElement completionTokensJson));
+ Assert.True(completionTokensJson.TryGetInt32(out int completionTokens));
+ Assert.NotEqual(0, completionTokens);
+
+ // ContentFilterResults
+ Assert.True(result.Metadata.ContainsKey("ContentFilterResults"));
+ }
+
+ [Theory(Skip = "This test is for manual verification.")]
+ [InlineData("\n")]
+ [InlineData("\r\n")]
+ public async Task CompletionWithDifferentLineEndingsAsync(string lineEnding)
+ {
+ // Arrange
+ var prompt =
+ "Given a json input and a request. Apply the request on the json input and return the result. " +
+ $"Put the result in between tags{lineEnding}" +
+ $$"""Input:{{lineEnding}}{"name": "John", "age": 30}{{lineEnding}}{{lineEnding}}Request:{{lineEnding}}name""";
+
+ var kernel = this.CreateAndInitializeKernel();
+
+ var plugins = TestHelpers.ImportSamplePlugins(kernel, "ChatPlugin");
+
+ // Act
+ FunctionResult actual = await kernel.InvokeAsync(plugins["ChatPlugin"]["Chat"], new() { [InputParameterName] = prompt });
+
+ // Assert
+ Assert.Contains("John", actual.GetValue(), StringComparison.OrdinalIgnoreCase);
+ }
+
+ [Fact]
+ public async Task ChatSystemPromptIsNotIgnoredAsync()
+ {
+ // Arrange
+ var kernel = this.CreateAndInitializeKernel();
+
+ var settings = new AzureOpenAIPromptExecutionSettings { ChatSystemPrompt = "Reply \"I don't know\" to every question." };
+
+ // Act
+ var result = await kernel.InvokePromptAsync("Where is the most famous fish market in Seattle, Washington, USA?", new(settings));
+
+ // Assert
+ Assert.Contains("I don't know", result.ToString(), StringComparison.OrdinalIgnoreCase);
+ }
+
+ [Fact]
+ public async Task SemanticKernelVersionHeaderIsSentAsync()
+ {
+ // Arrange
+ using var defaultHandler = new HttpClientHandler();
+ using var httpHeaderHandler = new HttpHeaderHandler(defaultHandler);
+ using var httpClient = new HttpClient(httpHeaderHandler);
+
+ var kernel = this.CreateAndInitializeKernel(httpClient);
+
+ // Act
+ var result = await kernel.InvokePromptAsync("Where is the most famous fish market in Seattle, Washington, USA?");
+
+ // Assert
+ Assert.NotNull(httpHeaderHandler.RequestHeaders);
+ Assert.True(httpHeaderHandler.RequestHeaders.TryGetValues("Semantic-Kernel-Version", out var values));
+ }
+
+ //[Theory(Skip = "This test is for manual verification.")]
+ [Theory]
+ [InlineData(null, null)]
+ [InlineData(false, null)]
+ [InlineData(true, 2)]
+ [InlineData(true, 5)]
+ public async Task LogProbsDataIsReturnedWhenRequestedAsync(bool? logprobs, int? topLogprobs)
+ {
+ // Arrange
+ var settings = new AzureOpenAIPromptExecutionSettings { Logprobs = logprobs, TopLogprobs = topLogprobs };
+
+ var kernel = this.CreateAndInitializeKernel();
+
+ // Act
+ var result = await kernel.InvokePromptAsync("Hi, can you help me today?", new(settings));
+
+ var logProbabilityInfo = result.Metadata?["LogProbabilityInfo"] as IReadOnlyList<ChatTokenLogProbabilityInfo>;
+
+ // Assert
+ Assert.NotNull(logProbabilityInfo);
+
+ if (logprobs is true)
+ {
+ Assert.NotNull(logProbabilityInfo);
+ Assert.Equal(topLogprobs, logProbabilityInfo[0].TopLogProbabilities.Count);
+ }
+ else
+ {
+ Assert.Empty(logProbabilityInfo);
+ }
+ }
+
+ #region internals
+
+ private Kernel CreateAndInitializeKernel(HttpClient? httpClient = null)
+ {
+ var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get<AzureOpenAIConfiguration>();
+ Assert.NotNull(azureOpenAIConfiguration);
+ Assert.NotNull(azureOpenAIConfiguration.ChatDeploymentName);
+ Assert.NotNull(azureOpenAIConfiguration.ApiKey);
+ Assert.NotNull(azureOpenAIConfiguration.Endpoint);
+ Assert.NotNull(azureOpenAIConfiguration.ServiceId);
+
+ this._kernelBuilder.AddAzureOpenAIChatCompletion(
+ deploymentName: azureOpenAIConfiguration.ChatDeploymentName,
+ modelId: azureOpenAIConfiguration.ChatModelId,
+ endpoint: azureOpenAIConfiguration.Endpoint,
+ apiKey: azureOpenAIConfiguration.ApiKey,
+ serviceId: azureOpenAIConfiguration.ServiceId,
+ httpClient: httpClient);
+
+ return this._kernelBuilder.Build();
+ }
+
+ private const string InputParameterName = "input";
+ private readonly IKernelBuilder _kernelBuilder = Kernel.CreateBuilder();
+
+ private readonly IConfigurationRoot _configuration = new ConfigurationBuilder()
+ .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true)
+ .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true)
+ .AddEnvironmentVariables()
+ .AddUserSecrets<AzureOpenAIChatCompletionTests>()
+ .Build();
+
+ private sealed class HttpHeaderHandler(HttpMessageHandler innerHandler) : DelegatingHandler(innerHandler)
+ {
+ public System.Net.Http.Headers.HttpRequestHeaders? RequestHeaders { get; private set; }
+
+ protected override async Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
+ {
+ this.RequestHeaders = request.Headers;
+ return await base.SendAsync(request, cancellationToken);
+ }
+ }
+
+ #endregion
+}
diff --git a/dotnet/src/IntegrationTestsV2/Connectors/AzureOpenAI/AzureOpenAIChatCompletion_FunctionCallingTests.cs b/dotnet/src/IntegrationTestsV2/Connectors/AzureOpenAI/AzureOpenAIChatCompletion_FunctionCallingTests.cs
new file mode 100644
index 000000000000..5bbbd60c9005
--- /dev/null
+++ b/dotnet/src/IntegrationTestsV2/Connectors/AzureOpenAI/AzureOpenAIChatCompletion_FunctionCallingTests.cs
@@ -0,0 +1,781 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Text.Json;
+using System.Threading.Tasks;
+using Microsoft.Extensions.Configuration;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+using OpenAI.Chat;
+using SemanticKernel.IntegrationTests.TestSettings;
+using SemanticKernel.IntegrationTestsV2.Connectors.AzureOpenAI;
+using Xunit;
+
+namespace SemanticKernel.IntegrationTests.Connectors.AzureOpenAI;
+
+public sealed class AzureOpenAIChatCompletionFunctionCallingTests
+{
+ [Fact]
+ public async Task CanAutoInvokeKernelFunctionsAsync()
+ {
+ // Arrange
+ var invokedFunctions = new List<string>();
+
+ var filter = new FakeFunctionFilter(async (context, next) =>
+ {
+ invokedFunctions.Add($"{context.Function.Name}({string.Join(", ", context.Arguments)})");
+ await next(context);
+ });
+
+ var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true);
+ kernel.FunctionInvocationFilters.Add(filter);
+
+ AzureOpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = AzureOpenAIToolCallBehavior.AutoInvokeKernelFunctions };
+
+ // Act
+ var result = await kernel.InvokePromptAsync("Given the current time of day and weather, what is the likely color of the sky in Boston?", new(settings));
+
+ // Assert
+ Assert.Contains("rain", result.GetValue<string>(), StringComparison.InvariantCulture);
+ Assert.Contains("GetCurrentUtcTime()", invokedFunctions);
+ Assert.Contains("Get_Weather_For_City([cityName, Boston])", invokedFunctions);
+ }
+
+ [Fact]
+ public async Task CanAutoInvokeKernelFunctionsStreamingAsync()
+ {
+ // Arrange
+ var invokedFunctions = new List<string>();
+
+ var filter = new FakeFunctionFilter(async (context, next) =>
+ {
+ invokedFunctions.Add($"{context.Function.Name}({string.Join(", ", context.Arguments)})");
+ await next(context);
+ });
+
+ var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true);
+ kernel.FunctionInvocationFilters.Add(filter);
+
+ AzureOpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = AzureOpenAIToolCallBehavior.AutoInvokeKernelFunctions };
+
+ var stringBuilder = new StringBuilder();
+
+ // Act
+ await foreach (var update in kernel.InvokePromptStreamingAsync("Given the current time of day and weather, what is the likely color of the sky in Boston?", new(settings)))
+ {
+ stringBuilder.Append(update);
+ }
+
+ // Assert
+ Assert.Contains("rain", stringBuilder.ToString(), StringComparison.InvariantCulture);
+ Assert.Contains("GetCurrentUtcTime()", invokedFunctions);
+ Assert.Contains("Get_Weather_For_City([cityName, Boston])", invokedFunctions);
+ }
+
+ [Fact]
+ public async Task CanAutoInvokeKernelFunctionsWithComplexTypeParametersAsync()
+ {
+ // Arrange
+ var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true);
+
+ AzureOpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = AzureOpenAIToolCallBehavior.AutoInvokeKernelFunctions };
+
+ // Act
+ var result = await kernel.InvokePromptAsync("What is the current temperature in Dublin, Ireland, in Fahrenheit?", new(settings));
+
+ // Assert
+ Assert.NotNull(result);
+ Assert.Contains("42.8", result.GetValue<string>(), StringComparison.InvariantCulture); // The WeatherPlugin always returns 42.8 for Dublin, Ireland.
+ }
+
+ [Fact]
+ public async Task CanAutoInvokeKernelFunctionsWithPrimitiveTypeParametersAsync()
+ {
+ // Arrange
+ var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true);
+
+ AzureOpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = AzureOpenAIToolCallBehavior.AutoInvokeKernelFunctions };
+
+ // Act
+ var result = await kernel.InvokePromptAsync("Convert 50 degrees Fahrenheit to Celsius.", new(settings));
+
+ // Assert
+ Assert.NotNull(result);
+ Assert.Contains("10", result.GetValue<string>(), StringComparison.InvariantCulture);
+ }
+
+ [Fact]
+ public async Task CanAutoInvokeKernelFunctionsWithEnumTypeParametersAsync()
+ {
+ // Arrange
+ var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true);
+
+ AzureOpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = AzureOpenAIToolCallBehavior.AutoInvokeKernelFunctions };
+
+ // Act
+ var result = await kernel.InvokePromptAsync("Given the current time of day and weather, what is the likely color of the sky in Boston?", new(settings));
+
+ // Assert
+ Assert.NotNull(result);
+ Assert.Contains("rain", result.GetValue<string>(), StringComparison.OrdinalIgnoreCase);
+ }
+
+ [Fact]
+ public async Task CanAutoInvokeKernelFunctionFromPromptAsync()
+ {
+ // Arrange
+ var kernel = this.CreateAndInitializeKernel();
+
+ var promptFunction = KernelFunctionFactory.CreateFromPrompt(
+ "Your role is always to return this text - 'A Game-Changer for the Transportation Industry'. Don't ask for more details or context.",
+ functionName: "FindLatestNews",
+ description: "Searches for the latest news.");
+
+ kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions(
+ "NewsProvider",
+ "Delivers up-to-date news content.",
+ [promptFunction]));
+
+ AzureOpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = AzureOpenAIToolCallBehavior.AutoInvokeKernelFunctions };
+
+ // Act
+ var result = await kernel.InvokePromptAsync("Show me the latest news as they are.", new(settings));
+
+ // Assert
+ Assert.NotNull(result);
+ Assert.Contains("Transportation", result.GetValue<string>(), StringComparison.InvariantCultureIgnoreCase);
+ }
+
+ [Fact]
+ public async Task CanAutoInvokeKernelFunctionFromPromptStreamingAsync()
+ {
+ // Arrange
+ var kernel = this.CreateAndInitializeKernel();
+
+ var promptFunction = KernelFunctionFactory.CreateFromPrompt(
+ "Your role is always to return this text - 'A Game-Changer for the Transportation Industry'. Don't ask for more details or context.",
+ functionName: "FindLatestNews",
+ description: "Searches for the latest news.");
+
+ kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions(
+ "NewsProvider",
+ "Delivers up-to-date news content.",
+ [promptFunction]));
+
+ AzureOpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = AzureOpenAIToolCallBehavior.AutoInvokeKernelFunctions };
+
+ // Act
+ var streamingResult = kernel.InvokePromptStreamingAsync("Show me the latest news as they are.", new(settings));
+
+ var builder = new StringBuilder();
+
+ await foreach (var update in streamingResult)
+ {
+ builder.Append(update.ToString());
+ }
+
+ var result = builder.ToString();
+
+ // Assert
+ Assert.NotNull(result);
+ Assert.Contains("Transportation", result, StringComparison.InvariantCultureIgnoreCase);
+ }
+
+ [Fact]
+ public async Task ConnectorSpecificChatMessageContentClassesCanBeUsedForManualFunctionCallingAsync()
+ {
+ // Arrange
+ var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true);
+
+ var chatHistory = new ChatHistory();
+ chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?");
+
+ var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = AzureOpenAIToolCallBehavior.EnableKernelFunctions };
+
+ var sut = kernel.GetRequiredService<IChatCompletionService>();
+
+ // Act
+ var result = await sut.GetChatMessageContentAsync(chatHistory, settings, kernel);
+
+ // Current way of handling function calls manually using connector specific chat message content class.
+ var toolCalls = ((AzureOpenAIChatMessageContent)result).ToolCalls.OfType<ChatToolCall>().ToList();
+
+ while (toolCalls.Count > 0)
+ {
+ // Adding LLM function call request to chat history
+ chatHistory.Add(result);
+
+ // Iterating over the requested function calls and invoking them
+ foreach (var toolCall in toolCalls)
+ {
+ string content = kernel.Plugins.TryGetFunctionAndArguments(toolCall, out KernelFunction? function, out KernelArguments? arguments) ?
+ JsonSerializer.Serialize((await function.InvokeAsync(kernel, arguments)).GetValue