Skip to content

Bump Microsoft.ML.OnnxRuntimeGenAI from 0.12.2 to 0.13.0 #75

Bump Microsoft.ML.OnnxRuntimeGenAI from 0.12.2 to 0.13.0

Bump Microsoft.ML.OnnxRuntimeGenAI from 0.12.2 to 0.13.0 #75

Triggered via pull request April 6, 2026 10:35
Status Failure
Total duration 11m 33s
Artifacts

build-validation.yml

on: pull_request
Fit to window
Zoom out
Zoom in

Annotations

12 errors
Coverage Suite
Process completed with exit code 1.
SelectProviderSurfacesGeminiSuggestedAndSupportedModels: DotPilot.Tests/Settings/ViewModels/SettingsModelTests.cs#L145
Expected details { DotPilot.Presentation.ProviderDetailItem { Label = "Installed version", Value = "0.34.0" }, DotPilot.Presentation.ProviderDetailItem { Label = "Suggested model", Value = "gemini-2.5-pro" }, DotPilot.Presentation.ProviderDetailItem { Label = "Supported models", Value = "gemini-2.5-pro" } } to have an item matching ((detail.Label == "Supported models") AndAlso detail.Value.Contains("gemini-2.5-flash", Ordinal)).
EnabledGeminiProviderReportsReadyRuntimeAndSuggestedModels: DotPilot.Tests/Providers/Services/AgentProviderStatusReaderTests.cs#L198
Expected provider.SupportedModelNames { "gemini-2.5-pro" } to contain "gemini-2.5-flash".
SendMessageAsyncPersistsTranscriptErrorWhenLocalLlamaRuntimeFailsToLoad: DotPilot.Tests/ChatSessions/Execution/AgentSessionServiceTests.cs#L524
Expected streamedResults.Where(result => result.IsSuccess).Select(result => result.Value!) { DotPilot.Core.ChatSessions.Contracts.SessionStreamEntry { AccentLabel = <null>, AgentProfileId = <null>, Author = "You", Id = "019d626503f172f0b1d24e85b06fdd52", Kind = SessionStreamEntryKind.UserMessage {value: 0}, SessionId = DotPilot.Core.SessionId { Value = {019d6265-03ed-7212-91b7-d8964dbef9b3} }, Text = "trigger the broken llama model", Timestamp = <2026-04-06 10:44:40.5618735 +0h> }, DotPilot.Core.ChatSessions.Contracts.SessionStreamEntry { AccentLabel = "status", AgentProfileId = <null>, Author = "System", Id = "019d626503f2713287c94f32ad782a39", Kind = SessionStreamEntryKind.Status {value: 4}, SessionId = DotPilot.Core.SessionId { Value = {019d6265-03ed-7212-91b7-d8964dbef9b3} }, Text = "Running Editable Session Agent with LLamaSharp.", Timestamp = <2026-04-06 10:44:40.5624169 +0h> }, DotPilot.Core.ChatSessions.Contracts.SessionStreamEntry { AccentLabel = "error", AgentProfileId = <null>, Author = "System", Id = "019d626503f378ed9bb6814fcb334594", Kind = SessionStreamEntryKind.Error {value: 5}, SessionId = DotPilot.Core.SessionId { Value = {019d6265-03ed-7212-91b7-d8964dbef9b3} }, Text = "LLamaSharp failed before responding: Failed to load model 'C:\Users\runneradmin\AppData\Local\Temp\DotPilot.Tests\AgentSessionServiceTests\421cdca8f9d84fb69884be0900e9c5a2\broken-mistral.gguf'.", Timestamp = <2026-04-06 10:44:40.5639881 +0h> } } to have an item matching (((Convert(entry.Kind, Int32) == 5) AndAlso entry.Text.Contains("LlamaSharp failed before responding", Ordinal)) AndAlso entry.Text.Contains("Failed to load model", Ordinal)).
GetWorkspaceAsyncSeedsDefaultSystemAgentForANewStore: DotPilot.Tests/ChatSessions/Execution/AgentSessionServiceTests.cs#L33
Expected workspace.Providers to contain 5 item(s), but found 7: DotPilot.Core.ChatSessions.Contracts.ProviderStatusDescriptor { Actions = {empty}, CanCreateAgents = True, CommandName = "debug", Details = {empty}, DisplayName = "Debug Provider", Id = DotPilot.Core.ProviderId { Value = {badf33d9-9434-8b35-bc0d-ad13fbe96f6f} }, InstalledVersion = "debug-echo", IsEnabled = True, Kind = AgentProviderKind.Debug {value: 0}, Status = AgentProviderStatus.Ready {value: 0}, StatusSummary = "Built in and ready for deterministic local testing.", SuggestedModelName = "debug-echo", SupportedModelNames = {"debug-echo"} }, DotPilot.Core.ChatSessions.Contracts.ProviderStatusDescriptor { Actions = DotPilot.Core.ChatSessions.Contracts.ProviderActionDescriptor { { Command = "npm install -g @openai/codex", Kind = ProviderActionKind.CopyCommand {value: 0}, Label = "Install", Summary = "Install the CLI, then refresh settings." } } , CanCreateAgents = False, CommandName = "codex", Details = DotPilot.Core.ChatSessions.Contracts.ProviderDetailDescriptor { { Label = "Install command", Value = "npm install -g @openai/codex" } } , DisplayName = "Codex", Id = DotPilot.Core.ProviderId { Value = {487f6a88-d438-8239-8153-806c4fe41b28} }, InstalledVersion = <null>, IsEnabled = False, Kind = AgentProviderKind.Codex {value: 1}, Status = AgentProviderStatus.Disabled {value: 2}, StatusSummary = "Provider is disabled for local agent creation. Codex CLI is not installed.", SuggestedModelName = "", SupportedModelNames = {empty} }, DotPilot.Core.ChatSessions.Contracts.ProviderStatusDescriptor { Actions = DotPilot.Core.ChatSessions.Contracts.ProviderActionDescriptor { { Command = "npm install -g @anthropic-ai/claude-code", Kind = ProviderActionKind.CopyCommand {value: 0}, Label = "Install", Summary = "Install the CLI, then refresh settings." 
} } , CanCreateAgents = False, CommandName = "claude", Details = DotPilot.Core.ChatSessions.Contracts.ProviderDetailDescriptor { { Label = "Install command", Value = "npm install -g @anthropic-ai/claude-code" } } , DisplayName = "Claude Code", Id = DotPilot.Core.ProviderId { Value = {7f8bf571-b16a-8bda-84ee-d925f4dcfe23} }, InstalledVersion = <null>, IsEnabled = False, Kind = AgentProviderKind.ClaudeCode {value: 2}, Status = AgentProviderStatus.Disabled {value: 2}, StatusSummary = "Provider is disabled for local agent creation. Claude Code CLI is not installed.", SuggestedModelName = "", SupportedModelNames = {empty} }, DotPilot.Core.ChatSessions.Contracts.ProviderStatusDescriptor { Actions = DotPilot.Core.ChatSessions.Contracts.ProviderActionDescriptor { { Command = "npm install -g @github/copilot", Kind = ProviderActionKind.CopyCommand {value: 0}, Label = "Install", Summary = "Install the CLI, then refresh settings." } } , (Output has exceeded the maximum of 100 lines. Increase FormattingOptions.MaxLines on AssertionScope or AssertionConfiguration to include more lines.).
SendMessageShowsPersistedTranscriptErrorWhenLocalLlamaRuntimeFails: DotPilot.Tests/Chat/ViewModels/ChatModelTests.cs#L103
Expected activeSession!.Messages { DotPilot.Presentation.ChatTimelineItem { AccentLabel = "status", Author = "System", AvatarBrush = <null>, Content = "Session started with Mutable Agent on Debug Provider. Send a message when ready.", Id = "019d6264e8f271f8baa685826365e9c4", Initial = "S", IsCurrentUser = False, Kind = SessionStreamEntryKind.Status {value: 4}, Timestamp = "10:44" }, DotPilot.Presentation.ChatTimelineItem { AccentLabel = <null>, Author = "You", AvatarBrush = <null>, Content = "trigger the broken llama model", Id = "019d6264e8fa7ec78c71e9913ca80552", Initial = "Y", IsCurrentUser = True, Kind = SessionStreamEntryKind.UserMessage {value: 0}, Timestamp = "10:44" }, DotPilot.Presentation.ChatTimelineItem { AccentLabel = "status", Author = "System", AvatarBrush = <null>, Content = "Running Mutable Agent with LLamaSharp.", Id = "019d6264e8fa730aacfe56a2121c242b", Initial = "S", IsCurrentUser = False, Kind = SessionStreamEntryKind.Status {value: 4}, Timestamp = "10:44" }, DotPilot.Presentation.ChatTimelineItem { AccentLabel = "error", Author = "System", AvatarBrush = <null>, Content = "LLamaSharp failed before responding: Failed to load model 'C:\Users\runneradmin\AppData\Local\Temp\DotPilot.Tests\ChatModelTests\e3fd99e1b729454ab5a6dbeda02e9062\broken-mistral.gguf'.", Id = "019d6264e941788080739179fc47c379", Initial = "S", IsCurrentUser = False, Kind = SessionStreamEntryKind.Error {value: 5}, Timestamp = "10:44" } } to have an item matching (((Convert(message.Kind, Int32) == 5) AndAlso message.Content.Contains("LlamaSharp failed before responding", Ordinal)) AndAlso message.Content.Contains("Failed to load model", Ordinal)).
Unit Test Suite
Process completed with exit code 1.
SelectProviderSurfacesGeminiSuggestedAndSupportedModels: DotPilot.Tests/Settings/ViewModels/SettingsModelTests.cs#L145
Expected details { DotPilot.Presentation.ProviderDetailItem { Label = "Installed version", Value = "0.34.0" }, DotPilot.Presentation.ProviderDetailItem { Label = "Suggested model", Value = "gemini-2.5-pro" }, DotPilot.Presentation.ProviderDetailItem { Label = "Supported models", Value = "gemini-2.5-pro" } } to have an item matching ((detail.Label == "Supported models") AndAlso detail.Value.Contains("gemini-2.5-flash", Ordinal)).
EnabledGeminiProviderReportsReadyRuntimeAndSuggestedModels: DotPilot.Tests/Providers/Services/AgentProviderStatusReaderTests.cs#L198
Expected provider.SupportedModelNames { "gemini-2.5-pro" } to contain "gemini-2.5-flash".
SendMessageAsyncPersistsTranscriptErrorWhenLocalLlamaRuntimeFailsToLoad: DotPilot.Tests/ChatSessions/Execution/AgentSessionServiceTests.cs#L524
Expected streamedResults.Where(result => result.IsSuccess).Select(result => result.Value!) { DotPilot.Core.ChatSessions.Contracts.SessionStreamEntry { AccentLabel = <null>, AgentProfileId = <null>, Author = "You", Id = "019d6265666e73da900aa9c3619eedc9", Kind = SessionStreamEntryKind.UserMessage {value: 0}, SessionId = DotPilot.Core.SessionId { Value = {019d6265-666a-7ce1-8023-42de71838bae} }, Text = "trigger the broken llama model", Timestamp = <2026-04-06 10:45:05.7740255 +0h> }, DotPilot.Core.ChatSessions.Contracts.SessionStreamEntry { AccentLabel = "status", AgentProfileId = <null>, Author = "System", Id = "019d6265666e7582bdb67e3ffcc07d8b", Kind = SessionStreamEntryKind.Status {value: 4}, SessionId = DotPilot.Core.SessionId { Value = {019d6265-666a-7ce1-8023-42de71838bae} }, Text = "Running Editable Session Agent with LLamaSharp.", Timestamp = <2026-04-06 10:45:05.7745909 +0h> }, DotPilot.Core.ChatSessions.Contracts.SessionStreamEntry { AccentLabel = "error", AgentProfileId = <null>, Author = "System", Id = "019d62656670774ba64edb7d67f2e42b", Kind = SessionStreamEntryKind.Error {value: 5}, SessionId = DotPilot.Core.SessionId { Value = {019d6265-666a-7ce1-8023-42de71838bae} }, Text = "LLamaSharp failed before responding: Failed to load model 'C:\Users\runneradmin\AppData\Local\Temp\DotPilot.Tests\AgentSessionServiceTests\1dd31081950a425da1ce33fc30a07866\broken-mistral.gguf'.", Timestamp = <2026-04-06 10:45:05.7761988 +0h> } } to have an item matching (((Convert(entry.Kind, Int32) == 5) AndAlso entry.Text.Contains("LlamaSharp failed before responding", Ordinal)) AndAlso entry.Text.Contains("Failed to load model", Ordinal)).
GetWorkspaceAsyncSeedsDefaultSystemAgentForANewStore: DotPilot.Tests/ChatSessions/Execution/AgentSessionServiceTests.cs#L33
Expected workspace.Providers to contain 5 item(s), but found 7: DotPilot.Core.ChatSessions.Contracts.ProviderStatusDescriptor { Actions = {empty}, CanCreateAgents = True, CommandName = "debug", Details = {empty}, DisplayName = "Debug Provider", Id = DotPilot.Core.ProviderId { Value = {badf33d9-9434-8b35-bc0d-ad13fbe96f6f} }, InstalledVersion = "debug-echo", IsEnabled = True, Kind = AgentProviderKind.Debug {value: 0}, Status = AgentProviderStatus.Ready {value: 0}, StatusSummary = "Built in and ready for deterministic local testing.", SuggestedModelName = "debug-echo", SupportedModelNames = {"debug-echo"} }, DotPilot.Core.ChatSessions.Contracts.ProviderStatusDescriptor { Actions = DotPilot.Core.ChatSessions.Contracts.ProviderActionDescriptor { { Command = "npm install -g @openai/codex", Kind = ProviderActionKind.CopyCommand {value: 0}, Label = "Install", Summary = "Install the CLI, then refresh settings." } } , CanCreateAgents = False, CommandName = "codex", Details = DotPilot.Core.ChatSessions.Contracts.ProviderDetailDescriptor { { Label = "Install command", Value = "npm install -g @openai/codex" } } , DisplayName = "Codex", Id = DotPilot.Core.ProviderId { Value = {487f6a88-d438-8239-8153-806c4fe41b28} }, InstalledVersion = <null>, IsEnabled = False, Kind = AgentProviderKind.Codex {value: 1}, Status = AgentProviderStatus.Disabled {value: 2}, StatusSummary = "Provider is disabled for local agent creation. Codex CLI is not installed.", SuggestedModelName = "", SupportedModelNames = {empty} }, DotPilot.Core.ChatSessions.Contracts.ProviderStatusDescriptor { Actions = DotPilot.Core.ChatSessions.Contracts.ProviderActionDescriptor { { Command = "npm install -g @anthropic-ai/claude-code", Kind = ProviderActionKind.CopyCommand {value: 0}, Label = "Install", Summary = "Install the CLI, then refresh settings." 
} } , CanCreateAgents = False, CommandName = "claude", Details = DotPilot.Core.ChatSessions.Contracts.ProviderDetailDescriptor { { Label = "Install command", Value = "npm install -g @anthropic-ai/claude-code" } } , DisplayName = "Claude Code", Id = DotPilot.Core.ProviderId { Value = {7f8bf571-b16a-8bda-84ee-d925f4dcfe23} }, InstalledVersion = <null>, IsEnabled = False, Kind = AgentProviderKind.ClaudeCode {value: 2}, Status = AgentProviderStatus.Disabled {value: 2}, StatusSummary = "Provider is disabled for local agent creation. Claude Code CLI is not installed.", SuggestedModelName = "", SupportedModelNames = {empty} }, DotPilot.Core.ChatSessions.Contracts.ProviderStatusDescriptor { Actions = DotPilot.Core.ChatSessions.Contracts.ProviderActionDescriptor { { Command = "npm install -g @github/copilot", Kind = ProviderActionKind.CopyCommand {value: 0}, Label = "Install", Summary = "Install the CLI, then refresh settings." } } , (Output has exceeded the maximum of 100 lines. Increase FormattingOptions.MaxLines on AssertionScope or AssertionConfiguration to include more lines.).
SendMessageShowsPersistedTranscriptErrorWhenLocalLlamaRuntimeFails: DotPilot.Tests/Chat/ViewModels/ChatModelTests.cs#L103
Expected activeSession!.Messages { DotPilot.Presentation.ChatTimelineItem { AccentLabel = "status", Author = "System", AvatarBrush = <null>, Content = "Session started with Mutable Agent on Debug Provider. Send a message when ready.", Id = "019d626548777c54a652663b29dbb1fd", Initial = "S", IsCurrentUser = False, Kind = SessionStreamEntryKind.Status {value: 4}, Timestamp = "10:44" }, DotPilot.Presentation.ChatTimelineItem { AccentLabel = <null>, Author = "You", AvatarBrush = <null>, Content = "trigger the broken llama model", Id = "019d6265487e7778ab4e379866401a28", Initial = "Y", IsCurrentUser = True, Kind = SessionStreamEntryKind.UserMessage {value: 0}, Timestamp = "10:44" }, DotPilot.Presentation.ChatTimelineItem { AccentLabel = "status", Author = "System", AvatarBrush = <null>, Content = "Running Mutable Agent with LLamaSharp.", Id = "019d6265487f75bdab8e60e40aee3c0a", Initial = "S", IsCurrentUser = False, Kind = SessionStreamEntryKind.Status {value: 4}, Timestamp = "10:44" }, DotPilot.Presentation.ChatTimelineItem { AccentLabel = "error", Author = "System", AvatarBrush = <null>, Content = "LLamaSharp failed before responding: Failed to load model 'C:\Users\runneradmin\AppData\Local\Temp\DotPilot.Tests\ChatModelTests\4f85e8b860824b5290276845afbe1d2c\broken-mistral.gguf'.", Id = "019d626548f1708b8323dce95c452f7e", Initial = "S", IsCurrentUser = False, Kind = SessionStreamEntryKind.Error {value: 5}, Timestamp = "10:44" } } to have an item matching (((Convert(message.Kind, Int32) == 5) AndAlso message.Content.Contains("LlamaSharp failed before responding", Ordinal)) AndAlso message.Content.Contains("Failed to load model", Ordinal)).