From 84b2622c9e378bd266e01ef58fb7039cc090ad13 Mon Sep 17 00:00:00 2001
From: Paul Gear
Date: Wed, 25 Feb 2026 15:41:26 +1000
Subject: [PATCH] For non-anthropic providers, combine system prompts

This is a fix for issue #5034, which works for me using Qwen3.5-35B-A3B.
---
 packages/opencode/src/session/llm.ts | 14 ++++++++------
 1 file changed, 8 insertions(+), 6 deletions(-)

diff --git a/packages/opencode/src/session/llm.ts b/packages/opencode/src/session/llm.ts
index 4e42fb0d2ec7..3e2171af37b2 100644
--- a/packages/opencode/src/session/llm.ts
+++ b/packages/opencode/src/session/llm.ts
@@ -223,12 +223,14 @@ export namespace LLM {
       },
       maxRetries: input.retries ?? 0,
       messages: [
-        ...system.map(
-          (x): ModelMessage => ({
-            role: "system",
-            content: x,
-          }),
-        ),
+        ...(input.model.api.npm === "@ai-sdk/anthropic"
+          ? system.map(
+              (x): ModelMessage => ({
+                role: "system",
+                content: x,
+              }),
+            )
+          : ([{ role: "system", content: system.join("\n") }] as ModelMessage[])),
         ...input.messages,
       ],
       model: wrapLanguageModel({