diff --git a/packages/opencode/src/cli/cmd/mcp.ts b/packages/opencode/src/cli/cmd/mcp.ts index c45b9e55d0f8..2763d6a26bd8 100644 --- a/packages/opencode/src/cli/cmd/mcp.ts +++ b/packages/opencode/src/cli/cmd/mcp.ts @@ -655,12 +655,13 @@ export const McpDebugCommand = cmd({ headers: { "Content-Type": "application/json", Accept: "application/json, text/event-stream", + ...(serverConfig.headers ?? {}), }, body: JSON.stringify({ jsonrpc: "2.0", method: "initialize", params: { - protocolVersion: "2024-11-05", + protocolVersion: "2025-06-18", capabilities: {}, clientInfo: { name: "opencode-debug", version: Installation.VERSION }, }, @@ -699,6 +700,7 @@ export const McpDebugCommand = cmd({ // Try creating transport with auth provider to trigger discovery const transport = new StreamableHTTPClientTransport(new URL(serverConfig.url), { authProvider, + requestInit: serverConfig.headers ? { headers: serverConfig.headers } : undefined, }) try { diff --git a/packages/opencode/src/cli/cmd/tui/context/sync.tsx b/packages/opencode/src/cli/cmd/tui/context/sync.tsx index 3b296a927aa4..f553b8bc4635 100644 --- a/packages/opencode/src/cli/cmd/tui/context/sync.tsx +++ b/packages/opencode/src/cli/cmd/tui/context/sync.tsx @@ -357,9 +357,8 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({ async function bootstrap() { console.log("bootstrapping") - const start = Date.now() - 30 * 24 * 60 * 60 * 1000 const sessionListPromise = sdk.client.session - .list({ start: start }) + .list({}) .then((x) => (x.data ?? 
[]).toSorted((a, b) => a.id.localeCompare(b.id))) // blocking - include session.list when continuing a session diff --git a/packages/opencode/src/mcp/index.ts b/packages/opencode/src/mcp/index.ts index e3bf4cac0688..8c0e43d574a6 100644 --- a/packages/opencode/src/mcp/index.ts +++ b/packages/opencode/src/mcp/index.ts @@ -739,7 +739,10 @@ export namespace MCP { }, ) - const transport = new StreamableHTTPClientTransport(new URL(mcpConfig.url), { authProvider }) + const transport = new StreamableHTTPClientTransport(new URL(mcpConfig.url), { + authProvider, + requestInit: mcpConfig.headers ? { headers: mcpConfig.headers } : undefined, + }) return yield* Effect.tryPromise({ try: () => { diff --git a/packages/opencode/src/provider/provider.ts b/packages/opencode/src/provider/provider.ts index c6784f450244..881a5c9dea50 100644 --- a/packages/opencode/src/provider/provider.ts +++ b/packages/opencode/src/provider/provider.ts @@ -1416,12 +1416,21 @@ export namespace Provider { const sdk = await resolveSDK(model, s) try { + const mergedOptions = { ...provider.options, ...model.options } const language = s.modelLoaders[model.providerID] - ? await s.modelLoaders[model.providerID](sdk, model.api.id, { - ...provider.options, - ...model.options, - }) - : sdk.languageModel(model.api.id) + ? 
await s.modelLoaders[model.providerID](sdk, model.api.id, mergedOptions) + : (() => { + // For custom providers using @ai-sdk/azure, apply azure-specific logic + if (model.api.npm === "@ai-sdk/azure") { + if (useLanguageModel(sdk)) return sdk.languageModel(model.api.id) + if (mergedOptions["useCompletionUrls"]) { + return sdk.chat(model.api.id) + } else { + return sdk.responses(model.api.id) + } + } + return sdk.languageModel(model.api.id) + })() s.models.set(key, language) return language } catch (e) { diff --git a/packages/opencode/src/provider/transform.ts b/packages/opencode/src/provider/transform.ts index f651a5b91aaf..1b14b566a646 100644 --- a/packages/opencode/src/provider/transform.ts +++ b/packages/opencode/src/provider/transform.ts @@ -935,7 +935,9 @@ export namespace ProviderTransform { } const key = sdkKey(model.api.npm) ?? model.providerID - return { [key]: options } + // @ai-sdk/azure delegates to OpenAIChatLanguageModel which expects "openai" key + const azureKey = model.api.npm === "@ai-sdk/azure" ? 
"openai" : key + return { [azureKey]: options } } export function maxOutputTokens(model: Provider.Model): number { diff --git a/packages/opencode/src/session/prompt.ts b/packages/opencode/src/session/prompt.ts index dbf815bd6d79..5d612c235f66 100644 --- a/packages/opencode/src/session/prompt.ts +++ b/packages/opencode/src/session/prompt.ts @@ -215,7 +215,7 @@ export namespace SessionPrompt { : await MessageV2.toModelMessages(context, mdl) const result = await LLM.stream({ agent: ag, - user: firstInfo, + user: { ...firstInfo, variant: undefined }, system: [], small: true, tools: {}, diff --git a/packages/opencode/test/provider/provider.test.ts b/packages/opencode/test/provider/provider.test.ts index 72ba9dba5a5c..641dff30420c 100644 --- a/packages/opencode/test/provider/provider.test.ts +++ b/packages/opencode/test/provider/provider.test.ts @@ -2282,3 +2282,45 @@ test("cloudflare-ai-gateway forwards config metadata options", async () => { }, }) }) + +test("custom provider using @ai-sdk/azure respects useCompletionUrls option", async () => { + await using tmp = await tmpdir({ + init: async (dir) => { + await Bun.write( + path.join(dir, "opencode.json"), + JSON.stringify({ + $schema: "https://opencode.ai/config.json", + provider: { + foundary: { + npm: "@ai-sdk/azure", + options: { + baseURL: "https://custom-domain/openai", + apiKey: "test-key", + apiVersion: "2025-04-01-preview", + useDeploymentBasedUrls: true, + useCompletionUrls: true, + }, + models: { + "gpt-5.4": { + name: "GPT-5.4", + }, + }, + }, + }, + }), + ) + }, + }) + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const providers = await Provider.list() + expect(providers[ProviderID.make("foundary")]).toBeDefined() + expect(providers[ProviderID.make("foundary")].options.useCompletionUrls).toBe(true) + const model = await Provider.getModel(ProviderID.make("foundary"), ModelID.make("gpt-5.4")) + expect(model).toBeDefined() + // Verify the model has the correct npm package + 
expect(model.api.npm).toBe("@ai-sdk/azure") + }, + }) +}) diff --git a/packages/opencode/test/session/prompt.test.ts b/packages/opencode/test/session/prompt.test.ts index 51d2e11941ae..d5ddc06b9823 100644 --- a/packages/opencode/test/session/prompt.test.ts +++ b/packages/opencode/test/session/prompt.test.ts @@ -516,3 +516,80 @@ describe("session.agent-resolution", () => { }) }, 30000) }) + +describe("session.title generation", () => { + test("strips variant from title generation to avoid effort parameter leakage", async () => { + let titleRequestCaptured = false + let titleRequestHasEffort = false + const server = Bun.serve({ + port: 0, + async fetch(req) { + const url = new URL(req.url) + if (!url.pathname.endsWith("/chat/completions")) { + return new Response("not found", { status: 404 }) + } + const body = JSON.parse(await req.text()) + const isTitleRequest = body.messages?.some((m: any) => m.content?.includes("Generate a title")) + if (isTitleRequest) { + titleRequestCaptured = true + titleRequestHasEffort = !!body.output_config?.effort || !!body.reasoning_effort + } + return new Response(chat("Test session title"), { + status: 200, + headers: { "Content-Type": "text/event-stream" }, + }) + }, + }) + + try { + await using tmp = await tmpdir({ + git: true, + init: async (dir) => { + await Bun.write( + path.join(dir, "opencode.json"), + JSON.stringify({ + $schema: "https://opencode.ai/config.json", + enabled_providers: ["alibaba"], + provider: { + alibaba: { + options: { + apiKey: "test-key", + baseURL: `${server.url.origin}/v1`, + }, + }, + }, + agent: { + build: { + model: "alibaba/qwen-plus", + variant: "max", + }, + }, + }), + ) + }, + }) + + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const session = await Session.create({}) + const result = await SessionPrompt.prompt({ + sessionID: session.id, + agent: "build", + variant: "max", + parts: [{ type: "text", text: "Hello, help me with something" }], + }) + + 
expect(result.info.role).toBe("assistant") + await new Promise((r) => setTimeout(r, 500)) + expect(titleRequestCaptured).toBe(true) + expect(titleRequestHasEffort).toBe(false) + const updated = await Session.get(session.id) + expect(updated.title).toBe("Test session title") + }, + }) + } finally { + server.stop(true) + } + }) +})