Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
24 changes: 23 additions & 1 deletion packages/opencode/src/session/prompt.ts
Original file line number Diff line number Diff line change
Expand Up @@ -336,7 +336,7 @@ export namespace SessionPrompt {

step++
if (step === 1)
ensureTitle({
await ensureTitle({
session,
modelID: lastUser.model.modelID,
providerID: lastUser.model.providerID,
Expand Down Expand Up @@ -1983,6 +1983,26 @@ NOTE: At any point in time through this workflow you should feel free to ask the
providerID: ProviderID
modelID: ModelID
}) {
// Derive a session title directly from the user's message when LLM title
// generation produced nothing: take the first non-empty line of the text
// parts, strip trailing punctuation, and keep at most the first five words.
function fallback(msg: MessageV2.WithParts) {
  const candidates: string[] = []
  for (const part of msg.parts) {
    if (part.type !== "text") continue
    for (const raw of part.text.split("\n")) candidates.push(raw.trim())
  }
  const first = candidates.find((candidate) => candidate.length > 0)
  if (!first) return
  const words = first
    .replace(/[\s\-:;,.!?]+$/g, "")
    .split(/\s+/)
    .filter(Boolean)
  const title = words.slice(0, 5).join(" ")
  if (!title) return
  // Clamp pathological lengths; tolerate the session vanishing mid-write.
  const clamped = title.length > 100 ? title.substring(0, 97) + "..." : title
  return Session.setTitle({ sessionID: input.session.id, title: clamped }).catch((err) => {
    if (NotFoundError.isInstance(err)) return
    throw err
  })
}

if (input.session.parentID) return
if (!Session.isDefaultTitle(input.session.title)) return

Expand Down Expand Up @@ -2052,5 +2072,7 @@ NOTE: At any point in time through this workflow you should feel free to ask the
} catch (error) {
log.error("failed to generate title", { error })
}

return fallback(firstRealUser)
}
}
157 changes: 81 additions & 76 deletions packages/opencode/test/session/prompt.test.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import path from "path"
import { describe, expect, test } from "bun:test"
import { describe, expect, spyOn, test } from "bun:test"
import { NamedError } from "@opencode-ai/util/error"
import { fileURLToPath } from "url"
import { Instance } from "../../src/project/instance"
Expand All @@ -9,84 +9,19 @@
import { SessionPrompt } from "../../src/session/prompt"
import { Log } from "../../src/util/log"
import { tmpdir } from "../fixture/fixture"
import { LLM } from "../../src/session/llm"
import { Filesystem } from "../../src/util/filesystem"

Log.init({ print: false })

// Create an externally-resolvable promise (a "deferred"): the caller receives
// both the promise and the function that settles it.
function defer<T>() {
  let settle!: (value: T | PromiseLike<T>) => void
  const promise = new Promise<T>((res) => (settle = res))
  return { promise, resolve: settle }
}

// Build a one-shot SSE body mimicking an OpenAI chat-completion stream that
// emits `text` as a single content delta and then terminates.
function chat(text: string) {
  const event = (choices: unknown[]) =>
    `data: ${JSON.stringify({ id: "chatcmpl-1", object: "chat.completion.chunk", choices })}`
  const events = [
    event([{ delta: { role: "assistant" } }]),
    event([{ delta: { content: text } }]),
    event([{ delta: {}, finish_reason: "stop" }]),
    "data: [DONE]",
  ]
  const body = new TextEncoder().encode(events.join("\n\n") + "\n\n")
  return new ReadableStream<Uint8Array>({
    start(controller) {
      // Entire payload is enqueued up front; the stream closes immediately.
      controller.enqueue(body)
      controller.close()
    },
  })
}

function hanging(ready: () => void) {
const encoder = new TextEncoder()
let timer: ReturnType<typeof setTimeout> | undefined
const first =
`data: ${JSON.stringify({
id: "chatcmpl-1",
object: "chat.completion.chunk",
choices: [{ delta: { role: "assistant" } }],
})}` + "\n\n"
const rest =
[
`data: ${JSON.stringify({
id: "chatcmpl-1",
object: "chat.completion.chunk",
choices: [{ delta: { content: "late" } }],
})}`,
`data: ${JSON.stringify({
id: "chatcmpl-1",
object: "chat.completion.chunk",
choices: [{ delta: {}, finish_reason: "stop" }],
})}`,
"data: [DONE]",
].join("\n\n") + "\n\n"

return new ReadableStream<Uint8Array>({
start(ctrl) {
ctrl.enqueue(encoder.encode(first))
ready()
timer = setTimeout(() => {
ctrl.enqueue(encoder.encode(rest))
ctrl.close()
}, 10000)
},
cancel() {
if (timer) clearTimeout(timer)
},
})
// Pull a single model definition out of the shared models-api.json fixture,
// failing loudly when either the provider or the model entry is absent.
async function loadFixture(providerID: string, modelID: string) {
  const file = path.join(import.meta.dir, "../tool/fixtures/models-api.json")
  const fixtures = await Filesystem.readJson<Record<string, { models: Record<string, unknown> }>>(file)
  const entry = fixtures[providerID]
  if (!entry) throw new Error(`Missing provider in fixture: ${providerID}`)
  const model = entry.models[modelID]
  if (!model) throw new Error(`Missing model in fixture: ${modelID}`)
  return model
}

describe("session.prompt missing file", () => {
Expand Down Expand Up @@ -237,7 +172,7 @@
return new Response("not found", { status: 404 })
}
calls++
return new Response(chat("packages/opencode/src/session/processor.ts"), {

Check failure on line 175 in packages/opencode/test/session/prompt.test.ts

View workflow job for this annotation

GitHub Actions / unit (linux)

ReferenceError: chat is not defined

at fetch (/home/runner/_work/opencode/opencode/packages/opencode/test/session/prompt.test.ts:175:29)

Check failure on line 175 in packages/opencode/test/session/prompt.test.ts

View workflow job for this annotation

GitHub Actions / unit (linux)

ReferenceError: chat is not defined

at fetch (/home/runner/_work/opencode/opencode/packages/opencode/test/session/prompt.test.ts:175:29)
status: 200,
headers: { "Content-Type": "text/event-stream" },
})
Expand Down Expand Up @@ -295,7 +230,7 @@
})

test("records aborted errors when prompt is cancelled mid-stream", async () => {
const ready = defer<void>()

Check failure on line 233 in packages/opencode/test/session/prompt.test.ts

View workflow job for this annotation

GitHub Actions / unit (linux)

ReferenceError: defer is not defined

at <anonymous> (/home/runner/_work/opencode/opencode/packages/opencode/test/session/prompt.test.ts:233:19)
const server = Bun.serve({
port: 0,
fetch(req) {
Expand Down Expand Up @@ -516,3 +451,73 @@
})
}, 30000)
})

describe("session title fallback", () => {
test("falls back to the first few words when title generation returns nothing", async () => {
const model = await loadFixture("openai", "gpt-5.2")
const stream = spyOn(LLM, "stream")
.mockResolvedValueOnce({
text: Promise.resolve(""),
} as Awaited<ReturnType<typeof LLM.stream>>)
.mockResolvedValueOnce({
fullStream: (async function* () {
yield { type: "start" }
yield {
type: "finish-step",
finishReason: "stop",
usage: { inputTokens: 1, outputTokens: 1, totalTokens: 2 },
providerMetadata: {},
}
yield { type: "finish" }
})(),
} as unknown as Awaited<ReturnType<typeof LLM.stream>>)

await using tmp = await tmpdir({
git: true,
config: {
enabled_providers: ["openai"],
provider: {
openai: {
options: {
apiKey: "test-openai-key",
models: {
"gpt-5.2": model,
},
},
},
},
agent: {
build: {
model: "openai/gpt-5.2",
},
},
},
})

await Instance.provide({
directory: tmp.path,
fn: async () => {
const session = await Session.create({})

await SessionPrompt.prompt({
sessionID: session.id,
agent: "build",
parts: [{ type: "text", text: "All of my sessions are names new session is the a telemetry package sideffect?" }],
})

expect(stream).toHaveBeenCalledTimes(2)

let info = await Session.get(session.id)
for (let i = 0; i < 20 && info?.title !== "All of my sessions are"; i++) {
await new Promise((resolve) => setTimeout(resolve, 50))
info = await Session.get(session.id)
}
expect(info?.title).toBe("All of my sessions are")

Check failure on line 515 in packages/opencode/test/session/prompt.test.ts

View workflow job for this annotation

GitHub Actions / unit (linux)

error: expect(received).toBe(expected)

Expected: "All of my sessions are" Received: "New session - 2026-03-30T15:07:48.391Z" at <anonymous> (/home/runner/_work/opencode/opencode/packages/opencode/test/session/prompt.test.ts:515:29)

await Session.remove(session.id)
},
})

stream.mockRestore()
})
})
Loading