diff --git a/dotnet/samples/Concepts/TextGeneration/Ollama_TextGeneration.cs b/dotnet/samples/Concepts/TextGeneration/Ollama_TextGeneration.cs
index 12f7d42b13ae..719d5eb9f951 100644
--- a/dotnet/samples/Concepts/TextGeneration/Ollama_TextGeneration.cs
+++ b/dotnet/samples/Concepts/TextGeneration/Ollama_TextGeneration.cs
@@ -1,7 +1,6 @@
 // Copyright (c) Microsoft. All rights reserved.
 
 using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Connectors.HuggingFace;
 using Microsoft.SemanticKernel.TextGeneration;
 using xRetry;
 
@@ -62,19 +61,14 @@ public async Task RunStreamingExampleAsync()
-        Console.WriteLine($"\n======== HuggingFace {model} streaming example ========\n");
+        Console.WriteLine($"\n======== Ollama {model} streaming example ========\n");
 
         Kernel kernel = Kernel.CreateBuilder()
-            .AddHuggingFaceTextGeneration(
-                model: model,
-                apiKey: TestConfiguration.HuggingFace.ApiKey)
+            .AddOllamaTextGeneration(
+                endpoint: new Uri(TestConfiguration.Ollama.Endpoint),
+                modelId: TestConfiguration.Ollama.ModelId)
             .Build();
 
-        var settings = new HuggingFacePromptExecutionSettings { UseCache = false };
-
-        var questionAnswerFunction = kernel.CreateFunctionFromPrompt("Question: {{$input}}; Answer:", new HuggingFacePromptExecutionSettings
-        {
-            UseCache = false
-        });
+        var questionAnswerFunction = kernel.CreateFunctionFromPrompt("Question: {{$input}}; Answer:");
 
-        await foreach (string text in kernel.InvokePromptStreamingAsync("Question: {{$input}}; Answer:", new(settings) { ["input"] = "What is New York?" }))
+        await foreach (string text in kernel.InvokePromptStreamingAsync("Question: {{$input}}; Answer:", new() { ["input"] = "What is New York?" }))
         {
             Console.Write(text);
         }