Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions docs/decisions/0046-kernel-content-graduation.md
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,7 @@ Pros:
- With no deferred content we have simpler API and a single responsibility for contents.
- Can be written and read in both `Data` or `DataUri` formats.
- Can have a `Uri` reference property, which is common for specialized contexts.
- Fully serializeable.
- Fully serializable.
- Data Uri parameters support (serialization included).
- Data Uri and Base64 validation checks
- Data Uri and Data can be dynamically generated
Expand Down Expand Up @@ -197,7 +197,7 @@ Pros:
- Can be used as a `BinaryContent` type
- Can be written and read in both `Data` or `DataUri` formats.
- Can have a `Uri` dedicated for referenced location.
- Fully serializeable.
- Fully serializable.
- Data Uri parameters support (serialization included).
- Data Uri and Base64 validation checks
- Can be retrieved
Expand Down Expand Up @@ -254,7 +254,7 @@ Pros:
- Can be used as a `BinaryContent` type
- Can be written and read in both `Data` or `DataUri` formats.
- Can have a `Uri` dedicated for referenced location.
- Fully serializeable.
- Fully serializable.
- Data Uri parameters support (serialization included).
- Data Uri and Base64 validation checks
- Can be retrieved
Expand Down
2 changes: 1 addition & 1 deletion dotnet/Directory.Packages.props
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@
<PackageVersion Include="System.Linq.Async" Version="6.0.1" />
<PackageVersion Include="System.Memory.Data" Version="8.0.0" />
<PackageVersion Include="System.Numerics.Tensors" Version="8.0.0" />
<PackageVersion Include="System.Text.Json" Version="8.0.2" />
<PackageVersion Include="System.Text.Json" Version="8.0.4" />
<PackageVersion Include="System.Threading.Tasks.Extensions" Version="4.5.4" />
<PackageVersion Include="System.ValueTuple" Version="4.5.0" />
<PackageVersion Include="OllamaSharp" Version="2.0.6" />
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="System.Numerics.Tensors" />
<PackageReference Include="System.Text.Json" VersionOverride="8.0.3" />
<PackageReference Include="System.Text.Json" />
<PackageReference Include="Microsoft.Extensions.DependencyInjection" />
</ItemGroup>

Expand Down
6 changes: 3 additions & 3 deletions dotnet/src/Connectors/Connectors.Ollama/Core/ServiceBase.cs
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ public abstract class ServiceBase
internal readonly OllamaApiClient _client;

internal ServiceBase(string model,
Uri baseUri,
Uri endpoint,
HttpClient? httpClient = null,
ILoggerFactory? loggerFactory = null)
{
Expand All @@ -31,7 +31,7 @@ internal ServiceBase(string model,

if (httpClient is not null)
{
httpClient.BaseAddress ??= baseUri;
httpClient.BaseAddress ??= endpoint;

// Try to add User-Agent header.
if (!httpClient.DefaultRequestHeaders.TryGetValues("User-Agent", out _))
Expand All @@ -52,7 +52,7 @@ internal ServiceBase(string model,
#pragma warning disable CA2000 // Dispose objects before losing scope
// Client needs to be created to be able to inject Semantic Kernel headers
var internalClient = HttpClientProvider.GetHttpClient();
internalClient.BaseAddress = baseUri;
internalClient.BaseAddress = endpoint;
internalClient.DefaultRequestHeaders.Add("User-Agent", HttpHeaderConstant.Values.UserAgent);
internalClient.DefaultRequestHeaders.Add(HttpHeaderConstant.Names.SemanticKernelVersion, HttpHeaderConstant.Values.GetAssemblyVersion(typeof(Kernel)));

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,23 +23,23 @@ public static class OllamaKernelBuilderExtensions
/// </summary>
/// <param name="builder">The kernel builder.</param>
/// <param name="modelId">The model for text generation.</param>
/// <param name="baseUri">The base uri to Ollama hosted service.</param>
/// <param name="endpoint">The endpoint to Ollama hosted service.</param>
/// <param name="serviceId">The optional service ID.</param>
/// <param name="httpClient">The optional custom HttpClient.</param>
/// <returns>The updated kernel builder.</returns>
public static IKernelBuilder AddOllamaTextGeneration(
this IKernelBuilder builder,
string modelId,
Uri baseUri,
Uri endpoint,
string? serviceId = null,
HttpClient? httpClient = null)
{
Verify.NotNull(builder);

builder.Services.AddKeyedSingleton<ITextGenerationService>(serviceId, (serviceProvider, _) =>
new OllamaTextGenerationService(
model: modelId,
baseUri: baseUri,
modelId: modelId,
endpoint: endpoint,
httpClient: HttpClientProvider.GetHttpClient(httpClient, serviceProvider),
loggerFactory: serviceProvider.GetService<ILoggerFactory>()));
return builder;
Expand All @@ -63,7 +63,7 @@ public static IKernelBuilder AddOllamaTextGeneration(

builder.Services.AddKeyedSingleton<ITextGenerationService>(serviceId, (serviceProvider, _) =>
new OllamaTextGenerationService(
model: modelId,
modelId: modelId,
ollamaClient: ollamaClient,
loggerFactory: serviceProvider.GetService<ILoggerFactory>()));
return builder;
Expand All @@ -74,14 +74,14 @@ public static IKernelBuilder AddOllamaTextGeneration(
/// </summary>
/// <param name="builder">The kernel builder.</param>
/// <param name="modelId">The model for text generation.</param>
/// <param name="baseUri">The base uri to Ollama hosted service.</param>
/// <param name="endpoint">The endpoint to Ollama hosted service.</param>
/// <param name="serviceId">The optional service ID.</param>
/// <param name="httpClient">The optional custom HttpClient.</param>
/// <returns>The updated kernel builder.</returns>
public static IKernelBuilder AddOllamaChatCompletion(
this IKernelBuilder builder,
string modelId,
Uri baseUri,
Uri endpoint,
string? serviceId = null,
HttpClient? httpClient = null)
{
Expand All @@ -90,8 +90,8 @@ public static IKernelBuilder AddOllamaChatCompletion(

builder.Services.AddKeyedSingleton<IChatCompletionService>(serviceId, (serviceProvider, _) =>
new OllamaChatCompletionService(
model: modelId,
baseUri: baseUri,
modelId: modelId,
endpoint: endpoint,
httpClient: HttpClientProvider.GetHttpClient(httpClient, serviceProvider),
loggerFactory: serviceProvider.GetService<ILoggerFactory>()));

Expand All @@ -116,7 +116,7 @@ public static IKernelBuilder AddOllamaChatCompletion(

builder.Services.AddKeyedSingleton<IChatCompletionService>(serviceId, (serviceProvider, _) =>
new OllamaChatCompletionService(
model: modelId,
modelId: modelId,
client: ollamaClient,
loggerFactory: serviceProvider.GetService<ILoggerFactory>()));

Expand All @@ -128,23 +128,23 @@ public static IKernelBuilder AddOllamaChatCompletion(
/// </summary>
/// <param name="builder">The kernel builder.</param>
/// <param name="modelId">The model for text generation.</param>
/// <param name="baseUri">The base uri to Ollama hosted service.</param>
/// <param name="endpoint">The endpoint to Ollama hosted service.</param>
/// <param name="serviceId">The optional service ID.</param>
/// <param name="httpClient">The optional custom HttpClient.</param>
/// <returns>The updated kernel builder.</returns>
public static IKernelBuilder AddOllamaTextEmbeddingGeneration(
this IKernelBuilder builder,
string modelId,
Uri baseUri,
Uri endpoint,
string? serviceId = null,
HttpClient? httpClient = null)
{
Verify.NotNull(builder);

builder.Services.AddKeyedSingleton<ITextEmbeddingGenerationService>(serviceId, (serviceProvider, _) =>
new OllamaTextEmbeddingGenerationService(
model: modelId,
baseUri: baseUri,
modelId: modelId,
endpoint: endpoint,
httpClient: HttpClientProvider.GetHttpClient(httpClient, serviceProvider),
loggerFactory: serviceProvider.GetService<ILoggerFactory>()));

Expand All @@ -169,7 +169,7 @@ public static IKernelBuilder AddOllamaTextEmbeddingGeneration(

builder.Services.AddKeyedSingleton<ITextEmbeddingGenerationService>(serviceId, (serviceProvider, _) =>
new OllamaTextEmbeddingGenerationService(
model: modelId,
modelId: modelId,
ollamaClient: ollamaClient,
loggerFactory: serviceProvider.GetService<ILoggerFactory>()));

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,21 +22,21 @@ public static class OllamaServiceCollectionExtensions
/// </summary>
/// <param name="services">The target service collection.</param>
/// <param name="modelId">The model for text generation.</param>
/// <param name="baseUri">The base uri to Ollama hosted service.</param>
/// <param name="endpoint">The endpoint to Ollama hosted service.</param>
/// <param name="serviceId">The optional service ID.</param>
/// <returns>The updated kernel builder.</returns>
public static IServiceCollection AddOllamaTextGeneration(
this IServiceCollection services,
string modelId,
Uri baseUri,
Uri endpoint,
string? serviceId = null)
{
Verify.NotNull(services);

return services.AddKeyedSingleton<ITextGenerationService>(serviceId, (serviceProvider, _) =>
new OllamaTextGenerationService(
model: modelId,
baseUri: baseUri,
modelId: modelId,
endpoint: endpoint,
httpClient: HttpClientProvider.GetHttpClient(serviceProvider),
loggerFactory: serviceProvider.GetService<ILoggerFactory>()));
}
Expand All @@ -59,7 +59,7 @@ public static IServiceCollection AddOllamaTextGeneration(

return services.AddKeyedSingleton<ITextGenerationService>(serviceId, (serviceProvider, _) =>
new OllamaTextGenerationService(
model: modelId,
modelId: modelId,
ollamaClient: ollamaClient,
loggerFactory: serviceProvider.GetService<ILoggerFactory>()));
}
Expand All @@ -69,21 +69,21 @@ public static IServiceCollection AddOllamaTextGeneration(
/// </summary>
/// <param name="services">The target service collection.</param>
/// <param name="modelId">The model for text generation.</param>
/// <param name="baseUri">The base uri to Ollama hosted service.</param>
/// <param name="endpoint">The endpoint to Ollama hosted service.</param>
/// <param name="serviceId">Optional service ID.</param>
/// <returns>The updated service collection.</returns>
public static IServiceCollection AddOllamaChatCompletion(
this IServiceCollection services,
string modelId,
Uri baseUri,
Uri endpoint,
string? serviceId = null)
{
Verify.NotNull(services);

services.AddKeyedSingleton<IChatCompletionService>(serviceId, (serviceProvider, _) =>
new OllamaChatCompletionService(
model: modelId,
baseUri: baseUri,
modelId: modelId,
endpoint: endpoint,
httpClient: HttpClientProvider.GetHttpClient(serviceProvider),
loggerFactory: serviceProvider.GetService<ILoggerFactory>()));

Expand All @@ -108,7 +108,7 @@ public static IServiceCollection AddOllamaChatCompletion(

return services.AddKeyedSingleton<IChatCompletionService>(serviceId, (serviceProvider, _) =>
new OllamaChatCompletionService(
model: modelId,
modelId: modelId,
client: ollamaClient,
loggerFactory: serviceProvider.GetService<ILoggerFactory>()));
}
Expand All @@ -118,21 +118,21 @@ public static IServiceCollection AddOllamaChatCompletion(
/// </summary>
/// <param name="services">The target service collection.</param>
/// <param name="modelId">The model for text generation.</param>
/// <param name="baseUri">The base uri to Ollama hosted service.</param>
/// <param name="endpoint">The endpoint to Ollama hosted service.</param>
/// <param name="serviceId">Optional service ID.</param>
/// <returns>The updated kernel builder.</returns>
public static IServiceCollection AddOllamaTextEmbeddingGeneration(
this IServiceCollection services,
string modelId,
Uri baseUri,
Uri endpoint,
string? serviceId = null)
{
Verify.NotNull(services);

return services.AddKeyedSingleton<ITextEmbeddingGenerationService>(serviceId, (serviceProvider, _) =>
new OllamaTextEmbeddingGenerationService(
model: modelId,
baseUri: baseUri,
modelId: modelId,
endpoint: endpoint,
httpClient: HttpClientProvider.GetHttpClient(serviceProvider),
loggerFactory: serviceProvider.GetService<ILoggerFactory>()));
}
Expand All @@ -155,7 +155,7 @@ public static IServiceCollection AddOllamaTextEmbeddingGeneration(

return services.AddKeyedSingleton<ITextEmbeddingGenerationService>(serviceId, (serviceProvider, _) =>
new OllamaTextEmbeddingGenerationService(
model: modelId,
modelId: modelId,
ollamaClient: ollamaClient,
loggerFactory: serviceProvider.GetService<ILoggerFactory>()));
}
Expand Down
54 changes: 47 additions & 7 deletions dotnet/src/Connectors/Connectors.Ollama/OllamaMetadata.cs
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
using System.Collections.ObjectModel;
using System.Runtime.CompilerServices;
using OllamaSharp.Models;
using OllamaSharp.Models.Chat;

namespace Microsoft.SemanticKernel.Connectors.Ollama;

Expand All @@ -12,15 +13,45 @@ namespace Microsoft.SemanticKernel.Connectors.Ollama;
/// </summary>
public sealed class OllamaMetadata : ReadOnlyDictionary<string, object?>
{
/// <summary>
/// Initializes the metadata from a text-generation stream chunk.
/// </summary>
/// <param name="ollamaResponse">
/// A streamed completion chunk, or <c>null</c> (in which case the metadata stays empty).
/// Per-request statistics (durations, eval counts) are only present on the final
/// <see cref="GenerateCompletionDoneResponseStream"/> chunk, so they are copied only then.
/// </param>
internal OllamaMetadata(GenerateCompletionResponseStream? ollamaResponse) : base(new Dictionary<string, object?>())
{
    if (ollamaResponse is null)
    {
        return;
    }

    // Present on every streamed chunk.
    this.CreatedAt = ollamaResponse.CreatedAt;
    this.Done = ollamaResponse.Done;

    // Statistics are only populated on the terminal "done" chunk of the stream.
    if (ollamaResponse is GenerateCompletionDoneResponseStream doneResponse)
    {
        this.TotalDuration = doneResponse.TotalDuration;
        this.EvalCount = doneResponse.EvalCount;
        this.EvalDuration = doneResponse.EvalDuration;
        this.LoadDuration = doneResponse.LoadDuration;
        this.PromptEvalCount = doneResponse.PromptEvalCount;
        this.PromptEvalDuration = doneResponse.PromptEvalDuration;
    }
}

/// <summary>
/// Initializes the metadata from a chat-completion stream chunk.
/// </summary>
/// <param name="message">
/// A streamed chat chunk, or <c>null</c> (in which case the metadata stays empty).
/// Per-request statistics are only present on the final <see cref="ChatDoneResponseStream"/> chunk.
/// </param>
internal OllamaMetadata(ChatResponseStream? message) : base(new Dictionary<string, object?>())
{
    if (message is null)
    {
        return;
    }

    // message is proven non-null by the guard above, so no null-conditional access is needed.
    this.CreatedAt = message.CreatedAt;
    this.Done = message.Done;

    // Statistics are only populated on the terminal "done" chunk of the stream.
    if (message is ChatDoneResponseStream doneMessage)
    {
        this.TotalDuration = doneMessage.TotalDuration;
        this.EvalCount = doneMessage.EvalCount;
        this.EvalDuration = doneMessage.EvalDuration;
        this.LoadDuration = doneMessage.LoadDuration;
        this.PromptEvalCount = doneMessage.PromptEvalCount;
        this.PromptEvalDuration = doneMessage.PromptEvalDuration;
    }
}

/// <summary>
Expand Down Expand Up @@ -59,6 +90,15 @@ public string? CreatedAt
internal init => this.SetValueInDictionary(value);
}

/// <summary>
/// Indicates whether the streamed response has completed (the value of the
/// Ollama stream's "done" flag), or <c>null</c> when no response supplied it.
/// The value is stored in the underlying metadata dictionary.
/// </summary>
public bool? Done
{
    get => this.GetValueFromDictionary() as bool?;
    internal init => this.SetValueInDictionary(value);
}

/// <summary>
/// Time in nano seconds spent generating the response
/// </summary>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
namespace Microsoft.SemanticKernel.Connectors.Ollama;

/// <summary>
/// Ollama Execution Settings.
/// Ollama Prompt Execution Settings.
/// </summary>
public sealed class OllamaPromptExecutionSettings : PromptExecutionSettings
{
Expand Down
Loading