diff --git a/dotnet/Directory.Packages.props b/dotnet/Directory.Packages.props
index e0bfad396dcb..a4005b9d7abf 100644
--- a/dotnet/Directory.Packages.props
+++ b/dotnet/Directory.Packages.props
@@ -37,7 +37,7 @@
-
+
diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/OllamaPromptExecutionSettingsTests.cs b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/OllamaPromptExecutionSettingsTests.cs
index 931b1f0674a8..314d05876e6f 100644
--- a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/OllamaPromptExecutionSettingsTests.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/OllamaPromptExecutionSettingsTests.cs
@@ -1,5 +1,6 @@
// Copyright (c) Microsoft. All rights reserved.
+using System.Linq;
using System.Text.Json;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Connectors.Ollama;
@@ -46,7 +47,7 @@ public void FromExecutionSettingsWhenSerializedHasPropertiesShouldPopulateSpecia
{
string jsonSettings = """
{
- "stop": "stop me",
+ "stop": ["stop me"],
"temperature": 0.5,
"top_p": 0.9,
"top_k": 100
@@ -56,7 +57,7 @@ public void FromExecutionSettingsWhenSerializedHasPropertiesShouldPopulateSpecia
var executionSettings = JsonSerializer.Deserialize<PromptExecutionSettings>(jsonSettings);
var ollamaExecutionSettings = OllamaPromptExecutionSettings.FromExecutionSettings(executionSettings);
- Assert.Equal("stop me", ollamaExecutionSettings.Stop);
+ Assert.Equal("stop me", ollamaExecutionSettings.Stop?.FirstOrDefault());
Assert.Equal(0.5f, ollamaExecutionSettings.Temperature);
Assert.Equal(0.9f, ollamaExecutionSettings.TopP!.Value, 0.1f);
Assert.Equal(100, ollamaExecutionSettings.TopK);
diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaChatCompletionTests.cs b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaChatCompletionTests.cs
index 622268ecd2a5..a3cf41d62706 100644
--- a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaChatCompletionTests.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaChatCompletionTests.cs
@@ -109,11 +109,11 @@ public async Task ShouldHandleServiceResponseAsync()
}
[Fact]
- public async Task GetChatMessageContentsShouldHaveModelIdDefinedAsync()
+ public async Task GetChatMessageContentsShouldHaveModelAndMetadataAsync()
{
//Arrange
var sut = new OllamaChatCompletionService(
- "fake-model",
+ "phi3",
new Uri("http://localhost:11434"),
httpClient: this._httpClient);
@@ -135,11 +135,11 @@ public async Task GetChatMessageContentsShouldHaveModelIdDefinedAsync()
// Assert
Assert.NotNull(message.ModelId);
- Assert.Equal("fake-model", message.ModelId);
+ Assert.Equal("phi3", message.ModelId);
}
[Fact]
- public async Task GetStreamingChatMessageContentsShouldHaveModelIdDefinedAsync()
+ public async Task GetStreamingChatMessageContentsShouldHaveModelAndMetadataAsync()
{
//Arrange
var expectedModel = "phi3";
@@ -161,11 +161,18 @@ public async Task GetStreamingChatMessageContentsShouldHaveModelIdDefinedAsync()
await foreach (var message in sut.GetStreamingChatMessageContentsAsync(chat))
{
lastMessage = message;
+ Assert.NotNull(message.Metadata);
}
// Assert
Assert.NotNull(lastMessage!.ModelId);
Assert.Equal(expectedModel, lastMessage.ModelId);
+
+ Assert.IsType<OllamaMetadata>(lastMessage.Metadata);
+ var metadata = lastMessage.Metadata as OllamaMetadata;
+ Assert.NotNull(metadata);
+ Assert.NotEmpty(metadata);
+ Assert.True(metadata.Done);
}
public void Dispose()
diff --git a/dotnet/src/Connectors/Connectors.Ollama/OllamaMetadata.cs b/dotnet/src/Connectors/Connectors.Ollama/OllamaMetadata.cs
index 962826b525f0..fd7aba01819b 100644
--- a/dotnet/src/Connectors/Connectors.Ollama/OllamaMetadata.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama/OllamaMetadata.cs
@@ -54,6 +54,17 @@ internal OllamaMetadata(ChatResponseStream? message) : base(new Dictionary<string, object?>())
+ internal OllamaMetadata(ChatDoneResponseStream response) : base(new Dictionary<string, object?>())
+ {
+ this.TotalDuration = response.TotalDuration;
+ this.EvalCount = response.EvalCount;
+ this.EvalDuration = response.EvalDuration;
+ this.CreatedAt = response.CreatedAt;
+ this.LoadDuration = response.LoadDuration;
+ this.PromptEvalDuration = response.PromptEvalDuration;
+ this.PromptEvalCount = response.PromptEvalCount;
+ }
+
/// <summary>
/// Time spent in nanoseconds evaluating the prompt
/// </summary>
diff --git a/dotnet/src/Connectors/Connectors.Ollama/OllamaPromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.Ollama/OllamaPromptExecutionSettings.cs
index 283c6790c549..53ba15639008 100644
--- a/dotnet/src/Connectors/Connectors.Ollama/OllamaPromptExecutionSettings.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama/OllamaPromptExecutionSettings.cs
@@ -1,6 +1,7 @@
// Copyright (c) Microsoft. All rights reserved.
using System;
+using System.Collections.Generic;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.SemanticKernel.Text;
@@ -46,7 +47,7 @@ public static OllamaPromptExecutionSettings FromExecutionSettings(PromptExecutio
/// </summary>
[JsonPropertyName("stop")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
- public string? Stop
+ public List<string>? Stop
{
get => this._stop;
@@ -112,7 +113,7 @@ public float? Temperature
#region private ================================================================================
- private string? _stop;
+ private List<string>? _stop;
private float? _temperature;
private float? _topP;
private int? _topK;
diff --git a/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaChatCompletionService.cs b/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaChatCompletionService.cs
index f611b9625e88..3d3969bee7d8 100644
--- a/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaChatCompletionService.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaChatCompletionService.cs
@@ -63,17 +63,14 @@ public async Task<IReadOnlyList<ChatMessageContent>> GetChatMessageContentsAsync
var settings = OllamaPromptExecutionSettings.FromExecutionSettings(executionSettings);
var request = CreateChatRequest(chatHistory, settings, this._client.SelectedModel);
- var answer = await this._client.SendChat(request, _ => { }, cancellationToken).ConfigureAwait(false);
-
- // Ollama Client gives back the same requested history with added message at the end
- // To be compatible with this API behavior, we only return the added message (last).
- var message = answer.Last();
+ var response = await this._client.Chat(request, cancellationToken).ConfigureAwait(false);
return [new ChatMessageContent(
- role: GetAuthorRole(message.Role) ?? AuthorRole.Assistant,
- content: message.Content,
- modelId: this._client.SelectedModel,
- innerContent: message)]; // Currently the Ollama Message does not provide any metadata
+ role: GetAuthorRole(response.Message.Role) ?? AuthorRole.Assistant,
+ content: response.Message.Content,
+ modelId: response.Model,
+ innerContent: response,
+ metadata: new OllamaMetadata(response))];
}
///
@@ -89,9 +86,9 @@ public async IAsyncEnumerable<StreamingChatMessageContent> GetStreamingChatMessa
await foreach (var message in this._client.StreamChat(request, cancellationToken).ConfigureAwait(false))
{
yield return new StreamingChatMessageContent(
- GetAuthorRole(message?.Message.Role),
- message?.Message.Content,
- modelId: message?.Model,
+ role: GetAuthorRole(message!.Message.Role),
+ content: message.Message.Content,
+ modelId: message.Model,
innerContent: message,
metadata: new OllamaMetadata(message));
}
@@ -130,7 +127,7 @@ private static ChatRequest CreateChatRequest(ChatHistory chatHistory, OllamaProm
Temperature = settings.Temperature,
TopP = settings.TopP,
TopK = settings.TopK,
- Stop = settings.Stop
+ Stop = settings.Stop?.ToArray()
},
Messages = messages.ToList(),
Model = selectedModel,