diff --git a/packages/app/src/pages/session.tsx b/packages/app/src/pages/session.tsx
index 21ba4e7d7b46..e79f27f21bae 100644
--- a/packages/app/src/pages/session.tsx
+++ b/packages/app/src/pages/session.tsx
@@ -1,4 +1,4 @@
-import { onCleanup, Show, Match, Switch, createMemo, createEffect, on, onMount } from "solid-js"
+import { For, onCleanup, Show, Match, Switch, createMemo, createEffect, createSignal, on, onMount } from "solid-js"
import { createMediaQuery } from "@solid-primitives/media"
import { createResizeObserver } from "@solid-primitives/resize-observer"
import { useLocal } from "@/context/local"
@@ -15,7 +15,8 @@ import { checksum, base64Encode } from "@opencode-ai/util/encode"
import { useDialog } from "@opencode-ai/ui/context/dialog"
import { useLanguage } from "@/context/language"
import { useNavigate, useParams } from "@solidjs/router"
-import { UserMessage } from "@opencode-ai/sdk/v2"
+import { UserMessage, AssistantMessage } from "@opencode-ai/sdk/v2"
+import type { FileDiff } from "@opencode-ai/sdk/v2/client"
import { useSDK } from "@/context/sdk"
import { usePrompt } from "@/context/prompt"
import { useComments } from "@/context/comments"
diff --git a/packages/opencode/src/cli/cmd/tui/routes/session/dialog-inspect.tsx b/packages/opencode/src/cli/cmd/tui/routes/session/dialog-inspect.tsx
new file mode 100644
index 000000000000..fdc55bd8b904
--- /dev/null
+++ b/packages/opencode/src/cli/cmd/tui/routes/session/dialog-inspect.tsx
@@ -0,0 +1,282 @@
+import { TextAttributes, ScrollBoxRenderable } from "@opentui/core"
+import { useKeyboard } from "@opentui/solid"
+import { useDialog } from "../../ui/dialog"
+import { useTheme } from "@tui/context/theme"
+import type { Part, AssistantMessage } from "@opencode-ai/sdk/v2"
+import { Clipboard } from "../../util/clipboard"
+import { useToast } from "../../ui/toast"
+import { createSignal, Show } from "solid-js"
+
+interface DialogInspectProps {
+ message: AssistantMessage
+ parts: Part[]
+}
+
+function toYaml(obj: any, indent = 0): string {
+ if (obj === null) return "null"
+ if (obj === undefined) return "undefined"
+ if (typeof obj !== "object") return String(obj)
+
+ const spaces = " ".repeat(indent)
+
+ if (Array.isArray(obj)) {
+ if (obj.length === 0) return "[]"
+ return obj
+ .map((item) => {
+ if (typeof item === "object" && item !== null) {
+ return `\n${spaces}- ${toYaml(item, indent + 2).trimStart()}`
+ }
+ return `\n${spaces}- ${String(item)}`
+ })
+ .join("")
+ }
+
+ const keys = Object.keys(obj)
+ if (keys.length === 0) return "{}"
+
+ return keys
+ .map((key) => {
+ const value = obj[key]
+ if (typeof value === "object" && value !== null) {
+ if (Array.isArray(value) && value.length === 0) return `\n${spaces}${key}: []`
+ if (Object.keys(value).length === 0) return `\n${spaces}${key}: {}`
+ return `\n${spaces}${key}:${toYaml(value, indent + 2)}`
+ }
+ if (typeof value === "string" && value.includes("\n")) {
+ return `\n${spaces}${key}: |\n${value
+ .split("\n")
+ .map((l) => spaces + " " + l)
+ .join("\n")}`
+ }
+ return `\n${spaces}${key}: ${String(value)}`
+ })
+ .join("")
+}
+
+function PartView(props: { part: Part; theme: any; syntax: any }) {
+ const { part, theme, syntax } = props
+
+ if (part.type === "text") {
+ return (
+
+
+ Text
+
+ {part.text}
+
+ )
+ }
+
+ if (part.type === "patch") {
+ return (
+
+
+ Patch ({part.hash.substring(0, 7)})
+
+ Updated files:
+
+ {part.files.map((f) => (
+ - {f}
+ ))}
+
+
+ )
+ }
+
+ if (part.type === "tool") {
+ return (
+
+
+ Tool Use: {part.tool} ({part.state.status})
+
+
+ Input:
+ {toYaml(part.state.input).trim()}
+
+
+
+ Output:
+ {(part.state as any).output}
+
+
+
+
+ Error:
+ {(part.state as any).error}
+
+
+
+ )
+ }
+
+ if (part.type === "reasoning") {
+ return (
+
+
+ Reasoning
+
+ {part.text}
+
+ )
+ }
+
+ if (part.type === "file") {
+ return (
+
+
+ File Attachment
+
+ Name: {part.filename || "Unknown"}
+ Mime: {part.mime}
+ URL: {part.url}
+
+ )
+ }
+
+ return (
+
+
+ {part.type}
+
+
+
+ )
+}
+
+export function DialogInspect(props: DialogInspectProps) {
+ const { theme, syntax } = useTheme()
+ const dialog = useDialog()
+ const toast = useToast()
+
+ // State for raw mode
+ const [showRaw, setShowRaw] = createSignal(false)
+
+ // Set dialog size to large
+ dialog.setSize("xlarge")
+
+ // Ref to scrollbox for keyboard scrolling
+ let scrollRef: ScrollBoxRenderable | undefined
+
+ const handleCopy = () => {
+ Clipboard.copy(JSON.stringify(props.parts, null, 2))
+ .then(() => toast.show({ message: "Message copied to clipboard", variant: "success" }))
+ .catch(() => toast.show({ message: "Failed to copy message", variant: "error" }))
+ }
+
+ const handleToggleRaw = () => {
+ setShowRaw((prev) => !prev)
+ }
+
+ // Keyboard shortcuts
+ useKeyboard((evt) => {
+ // C - Copy
+ if (evt.name === "c" && !evt.ctrl && !evt.meta) {
+ evt.preventDefault()
+ handleCopy()
+ }
+
+ // S - Toggle raw/parsed
+ if (evt.name === "s" && !evt.ctrl && !evt.meta) {
+ evt.preventDefault()
+ handleToggleRaw()
+ }
+
+ // Arrow keys - scroll 1 line
+ if (evt.name === "down") {
+ evt.preventDefault()
+ scrollRef?.scrollBy(1)
+ }
+
+ if (evt.name === "up") {
+ evt.preventDefault()
+ scrollRef?.scrollBy(-1)
+ }
+
+ // Page keys - scroll page
+ if (evt.name === "pagedown") {
+ evt.preventDefault()
+ if (scrollRef) {
+ scrollRef.scrollBy(scrollRef.height)
+ }
+ }
+
+ if (evt.name === "pageup") {
+ evt.preventDefault()
+ if (scrollRef) {
+ scrollRef.scrollBy(-scrollRef.height)
+ }
+ }
+ })
+
+ return (
+
+
+
+ Message Inspection ({props.message.id})
+
+ dialog.clear()}>
+ [esc]
+
+
+
+ {
+ scrollRef = r
+ }}
+ flexGrow={1}
+ border={["bottom", "top"]}
+ borderColor={theme.borderSubtle}
+ >
+
+ }
+ >
+
+ {props.parts
+ .filter((p) => !["step-start", "step-finish", "reasoning"].includes(p.type))
+ .map((part) => (
+
+ ))}
+
+
+
+
+
+
+ ↑↓ scroll
+ PgUp/PgDn page
+ S toggle
+ C copy
+
+
+
+ {showRaw() ? "Show Parsed" : "Show Raw"}
+
+
+ Copy
+
+
+
+
+ )
+}
diff --git a/packages/opencode/src/cli/cmd/tui/routes/session/dialog-timeline.tsx b/packages/opencode/src/cli/cmd/tui/routes/session/dialog-timeline.tsx
index 87248a6a8ba6..b2c19656f8ed 100644
--- a/packages/opencode/src/cli/cmd/tui/routes/session/dialog-timeline.tsx
+++ b/packages/opencode/src/cli/cmd/tui/routes/session/dialog-timeline.tsx
@@ -1,37 +1,203 @@
import { createMemo, onMount } from "solid-js"
import { useSync } from "@tui/context/sync"
-import { DialogSelect, type DialogSelectOption } from "@tui/ui/dialog-select"
-import type { TextPart } from "@opencode-ai/sdk/v2"
+import { DialogSelect, type DialogSelectOption, type DialogSelectRef } from "@tui/ui/dialog-select"
+import type { Part, Message, AssistantMessage, ToolPart, FilePart } from "@opencode-ai/sdk/v2"
import { Locale } from "@/util/locale"
import { DialogMessage } from "./dialog-message"
+import { DialogInspect } from "./dialog-inspect"
import { useDialog } from "../../ui/dialog"
import type { PromptInfo } from "../../component/prompt/history"
+import { Token } from "@/util/token"
+import { useTheme } from "@tui/context/theme"
+import { useSDK } from "@tui/context/sdk"
+import fs from "fs"
+import path from "path"
+import { produce } from "solid-js/store"
+import { Binary } from "@opencode-ai/util/binary"
+import { Global } from "@/global"
+
+// Module-level variable to store the selected message when opening details
+let timelineSelection: string | undefined
+
+function formatTokenCount(tokens: number): string {
+ return tokens.toString().padStart(7)
+}
+
+function getMessageTokens(message: Message, parts: Part[], isCompaction: boolean = false): number {
+ if (message.role === "assistant") {
+ const assistantMsg = message as AssistantMessage
+ let total = 0
+
+ // Calculate tokens for this message turn only (not cumulative)
+ if (assistantMsg.tokens) {
+ const input = assistantMsg.tokens.input || 0
+ const output = assistantMsg.tokens.output || 0
+ const cacheWrite = assistantMsg.tokens.cache?.write || 0
+ const reasoning = assistantMsg.tokens.reasoning || 0
+
+ // Exclude cacheRead as it represents cumulative context, not this message's cost
+ total = input + output + cacheWrite + reasoning
+ } else {
+ // Fall back to aggregating from step-finish parts
+ for (const part of parts) {
+ if (part.type === "step-finish" && (part as any).tokens) {
+ const tokens = (part as any).tokens
+ total += tokens.input + tokens.output + (tokens.reasoning || 0)
+ }
+ }
+ }
+
+ // Add tool output tokens (not included in message.tokens)
+ for (const part of parts) {
+ if (part.type === "tool") {
+ const toolPart = part as ToolPart
+ const state = toolPart.state as any
+ if (state?.output) {
+ const output = typeof state.output === "string" ? state.output : JSON.stringify(state.output)
+ total += Token.estimate(output)
+ }
+ }
+ }
+
+ return total
+ }
+
+ // User message - estimate from parts
+ let estimate = 0
+ for (const part of parts) {
+ if (part.type === "text" && !part.synthetic && !part.ignored) {
+ estimate += Token.estimate(part.text)
+ }
+ if (part.type === "file") {
+ const filePart = part as FilePart
+ if (filePart.source?.text?.value) {
+ estimate += Token.estimate(filePart.source.text.value)
+ } else if (filePart.mime.startsWith("image/")) {
+ estimate += Token.estimateImage(filePart.url)
+ }
+ }
+ }
+ return estimate
+}
+
+function getMessageSummary(parts: Part[]): string {
+ const textPart = parts.find((x) => x.type === "text" && !x.synthetic && !x.ignored)
+ if (textPart && textPart.type === "text") {
+ return textPart.text.replace(/\n/g, " ")
+ }
+
+ const toolParts = parts.filter((x) => x.type === "tool") as ToolPart[]
+ if (toolParts.length > 0) {
+ const tools = toolParts.map((p) => p.tool).join(", ")
+ return `[${tools}]`
+ }
+
+ const fileParts = parts.filter((x) => x.type === "file") as FilePart[]
+ if (fileParts.length > 0) {
+ const files = fileParts.map((p) => p.filename || "file").join(", ")
+ return `[files: ${files}]`
+ }
+
+ return "[no content]"
+}
export function DialogTimeline(props: {
sessionID: string
onMove: (messageID: string) => void
setPrompt?: (prompt: PromptInfo) => void
}) {
- const sync = useSync()
+ const syncCtx = useSync()
+ const sync = syncCtx.data
+ const setStore = syncCtx.set
const dialog = useDialog()
+ const { theme } = useTheme()
+ const sdk = useSDK()
+
+ // Capture the stored selection and clear it
+ const initialSelection = timelineSelection
+ timelineSelection = undefined
+
+ let selectRef: DialogSelectRef | undefined
onMount(() => {
dialog.setSize("large")
+
+ // Restore selection after mount if we have one
+ if (initialSelection && selectRef) {
+ setTimeout(() => {
+ selectRef?.moveToValue(initialSelection)
+ }, 0)
+ }
})
const options = createMemo((): DialogSelectOption[] => {
- const messages = sync.data.message[props.sessionID] ?? []
+ const messages = sync.message[props.sessionID] ?? []
const result = [] as DialogSelectOption[]
+
for (const message of messages) {
- if (message.role !== "user") continue
- const part = (sync.data.part[message.id] ?? []).find(
- (x) => x.type === "text" && !x.synthetic && !x.ignored,
- ) as TextPart
- if (!part) continue
+ const parts = sync.part[message.id] ?? []
+
+ // Check if this is a compaction summary message
+ const isCompactionSummary = message.role === "assistant" && (message as AssistantMessage).summary === true
+
+ // Get the token count for this specific message (delta only, not cumulative)
+ const messageTokens = getMessageTokens(message, parts, isCompactionSummary)
+
+ // Display the tokens directly (no cumulative calculation needed)
+ const delta = messageTokens
+
+ const formatted = formatTokenCount(delta)
+
+ // Token count color based on thresholds (cold to hot gradient)
+ // Using delta for color coding
+ let tokenColor = theme.textMuted // grey < 1k
+ if (delta >= 20000) {
+ tokenColor = theme.error // red 20k+
+ } else if (delta >= 10000) {
+ tokenColor = theme.warning // orange 10k+
+ } else if (delta >= 5000) {
+ tokenColor = theme.accent // purple 5k+
+ } else if (delta >= 2000) {
+ tokenColor = theme.secondary // blue 2k+
+ } else if (delta >= 1000) {
+ tokenColor = theme.info // cyan 1k+
+ }
+
+ const summary = getMessageSummary(parts)
+
+ // Skip messages with no content
+ if (summary === "[no content]") continue
+
+ // Debug: Extract token breakdown for assistant messages
+ let tokenDebug = ""
+ if (message.role === "assistant") {
+ const assistantMsg = message as AssistantMessage
+ if (assistantMsg.tokens) {
+ const input = assistantMsg.tokens.input || 0
+ const output = assistantMsg.tokens.output || 0
+ const cacheRead = assistantMsg.tokens.cache?.read || 0
+ const cacheWrite = assistantMsg.tokens.cache?.write || 0
+ const reasoning = assistantMsg.tokens.reasoning || 0
+ tokenDebug = `(${input}/${output}/${cacheRead}/${cacheWrite}/${reasoning}) `
+ }
+ }
+
+ const prefix = isCompactionSummary ? "[compaction] " : message.role === "assistant" ? "agent: " : ""
+ const title = tokenDebug + prefix + summary
+
+ const gutter = [{formatted}]
+
+ // Normal assistant messages use textMuted for title
+ const isAssistant = message.role === "assistant" && !isCompactionSummary
+
result.push({
- title: part.text.replace(/\n/g, " "),
+ title,
+ gutter: isCompactionSummary ? [{formatted}] : gutter,
value: message.id,
footer: Locale.time(message.time.created),
+ titleColor: isCompactionSummary ? theme.success : isAssistant ? theme.textMuted : undefined,
+ footerColor: isCompactionSummary ? theme.success : undefined,
+ bg: isCompactionSummary ? theme.success : undefined,
onSelect: (dialog) => {
dialog.replace(() => (
@@ -39,9 +205,132 @@ export function DialogTimeline(props: {
},
})
}
+
result.reverse()
return result
})
- return props.onMove(option.value)} title="Timeline" options={options()} />
+ const handleDelete = async (messageID: string) => {
+ try {
+ const storageBase = path.join(Global.Path.data, "storage")
+
+ // Delete message file
+ const messagePath = path.join(storageBase, "message", props.sessionID, `${messageID}.json`)
+ if (fs.existsSync(messagePath)) {
+ fs.unlinkSync(messagePath)
+ }
+
+ // Delete all part files
+ const partsDir = path.join(storageBase, "part", messageID)
+ if (fs.existsSync(partsDir)) {
+ const partFiles = fs.readdirSync(partsDir)
+ for (const file of partFiles) {
+ fs.unlinkSync(path.join(partsDir, file))
+ }
+ fs.rmdirSync(partsDir)
+ }
+
+ // Invalidate session cache by setting the flag in storage
+ const sessionPath = path.join(
+ storageBase,
+ "session",
+ "project_" + sync.session.find((s) => s.id === props.sessionID)?.projectID || "",
+ `${props.sessionID}.json`,
+ )
+ if (fs.existsSync(sessionPath)) {
+ const sessionData = JSON.parse(fs.readFileSync(sessionPath, "utf-8"))
+ sessionData.cacheInvalidated = true
+ fs.writeFileSync(sessionPath, JSON.stringify(sessionData, null, 2))
+ }
+
+ // Update the UI store to remove the message
+ const messages = sync.message[props.sessionID]
+ const result = Binary.search(messages, messageID, (m) => m.id)
+ if (result.found) {
+ setStore(
+ "message",
+ props.sessionID,
+ produce((draft) => {
+ draft.splice(result.index, 1)
+ }),
+ )
+ }
+
+ // Also remove parts from UI
+ setStore("part", messageID, [])
+
+ // Update session in UI store to reflect cache invalidation
+ const sessionIndex = sync.session.findIndex((s) => s.id === props.sessionID)
+ if (sessionIndex >= 0) {
+ setStore("session", sessionIndex, "cacheInvalidated", true)
+ }
+ } catch (error) {
+ // Silent fail
+ }
+ }
+
+ return (
+ {
+ selectRef = r
+ }}
+ onMove={(option) => props.onMove(option.value)}
+ title="Timeline"
+ options={options()}
+ keybind={[
+ {
+ keybind: { name: "n", ctrl: false, meta: true, shift: false, leader: false },
+ title: "Next user",
+ onTrigger: (option) => {
+ const currentIdx = options().findIndex(opt => opt.value === option.value)
+ for (let i = currentIdx + 1; i < options().length; i++) {
+ const msgID = options()[i].value
+ const msg = sync.message[props.sessionID]?.find(m => m.id === msgID)
+ if (msg && msg.role === "user") {
+ selectRef?.moveToValue(msgID)
+ break
+ }
+ }
+ },
+ },
+ {
+ keybind: { name: "p", ctrl: false, meta: true, shift: false, leader: false },
+ title: "Previous user",
+ onTrigger: (option) => {
+ const currentIdx = options().findIndex(opt => opt.value === option.value)
+ for (let i = currentIdx - 1; i >= 0; i--) {
+ const msgID = options()[i].value
+ const msg = sync.message[props.sessionID]?.find(m => m.id === msgID)
+ if (msg && msg.role === "user") {
+ selectRef?.moveToValue(msgID)
+ break
+ }
+ }
+ },
+ },
+ {
+ keybind: { name: "delete", ctrl: false, meta: false, shift: false, leader: false },
+ title: "Delete",
+ onTrigger: (option) => {
+ handleDelete(option.value)
+ },
+ },
+ {
+ keybind: { name: "insert", ctrl: false, meta: false, shift: false, leader: false },
+ title: "Details",
+ onTrigger: (option) => {
+ const messageID = option.value
+ const message = sync.message[props.sessionID]?.find((m) => m.id === messageID)
+ const parts = sync.part[messageID] ?? []
+
+ if (message && message.role === "assistant") {
+ // Store the current selection before opening details
+ timelineSelection = messageID
+ dialog.push(() => )
+ }
+ },
+ },
+ ]}
+ />
+ )
}
diff --git a/packages/opencode/src/cli/cmd/tui/routes/session/index.tsx b/packages/opencode/src/cli/cmd/tui/routes/session/index.tsx
index 55ab4d54dd4c..beeba3ee0056 100644
--- a/packages/opencode/src/cli/cmd/tui/routes/session/index.tsx
+++ b/packages/opencode/src/cli/cmd/tui/routes/session/index.tsx
@@ -58,6 +58,7 @@ import type { PromptInfo } from "../../component/prompt/history"
import { DialogConfirm } from "@tui/ui/dialog-confirm"
import { DialogTimeline } from "./dialog-timeline"
import { DialogForkFromTimeline } from "./dialog-fork-from-timeline"
+import { DialogInspect } from "./dialog-inspect"
import { DialogSessionRename } from "../../component/dialog-session-rename"
import { Sidebar } from "./sidebar"
import { Flag } from "@/flag/flag"
@@ -1092,6 +1093,11 @@ export function Session() {
last={lastAssistant()?.id === message.id}
message={message as AssistantMessage}
parts={sync.data.part[message.id] ?? []}
+ next={
+ messages()
+ .slice(index() + 1)
+ .find((x) => x.role === "assistant") as AssistantMessage | undefined
+ }
/>
@@ -1257,11 +1263,13 @@ function UserMessage(props: {
)
}
-function AssistantMessage(props: { message: AssistantMessage; parts: Part[]; last: boolean }) {
+function AssistantMessage(props: { message: AssistantMessage; parts: Part[]; last: boolean; next?: AssistantMessage }) {
const local = useLocal()
const { theme } = useTheme()
const sync = useSync()
const messages = createMemo(() => sync.data.message[props.message.sessionID] ?? [])
+ const dialog = useDialog()
+ const [hover, setHover] = createSignal(false)
const final = createMemo(() => {
return props.message.finish && !["tool-calls", "unknown"].includes(props.message.finish)
@@ -1276,63 +1284,89 @@ function AssistantMessage(props: { message: AssistantMessage; parts: Part[]; las
})
return (
- <>
+
- {(part, index) => {
- const component = createMemo(() => PART_MAPPING[part.type as keyof typeof PART_MAPPING])
- return (
-
-
+ {(part, index) => {
+ const component = createMemo(() => PART_MAPPING[part.type as keyof typeof PART_MAPPING])
+ return (
+
+
+
+ )
+ }}
+
+
+
+
+
+ {" "}
+ {(props.message.tokens.input + (props.message.tokens.cache?.read ?? 0)).toLocaleString()} token
+ {props.next?.tokens
+ ? ` (+${(
+ props.next.tokens.input +
+ (props.next.tokens.cache?.read ?? 0) -
+ (props.message.tokens.input + (props.message.tokens.cache?.read ?? 0))
+ ).toLocaleString()})`
+ : ""}
+
+
+
+ setHover(true)}
+ onMouseOut={() => setHover(false)}
+ onMouseUp={() => dialog.replace(() => )}
+ backgroundColor={hover() ? theme.backgroundElement : undefined}
+ >
+ [?]
+
+
+
+
+
+ {props.message.error?.data.message}
+
+
+
+
+
+
+
+ ▣{" "}
+ {" "}
+ {Locale.titlecase(props.message.mode)}
+ · {props.message.modelID}
+
+ · {Locale.duration(duration())}
- )
- }}
-
-
-
- {props.message.error?.data.message}
+
+ · interrupted
+
+
-
-
-
-
-
-
- ▣{" "}
- {" "}
- {Locale.titlecase(props.message.mode)}
- · {props.message.modelID}
-
- · {Locale.duration(duration())}
-
-
- · interrupted
-
-
-
-
-
- >
+
+
+
)
}
diff --git a/packages/opencode/src/cli/cmd/tui/ui/dialog-select.tsx b/packages/opencode/src/cli/cmd/tui/ui/dialog-select.tsx
index 151f73cf7c0a..15d650de185a 100644
--- a/packages/opencode/src/cli/cmd/tui/ui/dialog-select.tsx
+++ b/packages/opencode/src/cli/cmd/tui/ui/dialog-select.tsx
@@ -39,12 +39,15 @@ export interface DialogSelectOption {
disabled?: boolean
bg?: RGBA
gutter?: JSX.Element
- onSelect?: (ctx: DialogContext) => void
+ titleColor?: RGBA
+ footerColor?: RGBA
+ onSelect?: (ctx: DialogContext, trigger?: "prompt") => void
}
export type DialogSelectRef = {
filter: string
filtered: DialogSelectOption[]
+ moveToValue: (value: T) => void
}
export function DialogSelect(props: DialogSelectProps) {
@@ -224,6 +227,12 @@ export function DialogSelect(props: DialogSelectProps) {
get filtered() {
return filtered()
},
+ moveToValue(value: T) {
+ const index = flat().findIndex((opt) => isDeepEqual(opt.value, value))
+ if (index >= 0) {
+ moveTo(index, true)
+ }
+ },
}
props.ref?.(ref)
@@ -326,6 +335,8 @@ export function DialogSelect(props: DialogSelectProps) {
active={active()}
current={current()}
gutter={option.gutter}
+ titleColor={option.titleColor}
+ footerColor={option.footerColor}
/>
)
@@ -361,6 +372,8 @@ function Option(props: {
current?: boolean
footer?: JSX.Element | string
gutter?: JSX.Element
+ titleColor?: RGBA
+ footerColor?: RGBA
onMouseOver?: () => void
}) {
const { theme } = useTheme()
@@ -380,20 +393,20 @@ function Option(props: {
- {Locale.truncate(props.title, 61)}
+ {Locale.truncate(props.title, 60)}
{props.description}
- {props.footer}
+ {props.footer}
>
diff --git a/packages/opencode/src/cli/cmd/tui/ui/dialog.tsx b/packages/opencode/src/cli/cmd/tui/ui/dialog.tsx
index 8cebd9cba54d..c611cf39d625 100644
--- a/packages/opencode/src/cli/cmd/tui/ui/dialog.tsx
+++ b/packages/opencode/src/cli/cmd/tui/ui/dialog.tsx
@@ -9,7 +9,7 @@ import { Selection } from "@tui/util/selection"
export function Dialog(
props: ParentProps<{
- size?: "medium" | "large"
+ size?: "medium" | "large" | "xlarge"
onClose: () => void
}>,
) {
@@ -35,7 +35,7 @@ export function Dialog(
height={dimensions().height}
alignItems="center"
position="absolute"
- paddingTop={dimensions().height / 4}
+ paddingTop={props.size === "xlarge" ? 2 : dimensions().height / 4}
left={0}
top={0}
backgroundColor={RGBA.fromInts(0, 0, 0, 150)}
@@ -45,7 +45,8 @@ export function Dialog(
dismiss = false
e.stopPropagation()
}}
- width={props.size === "large" ? 80 : 60}
+ width={props.size === "xlarge" ? 120 : props.size === "large" ? 80 : 60}
+ height={props.size === "xlarge" ? dimensions().height - 4 : undefined}
maxWidth={dimensions().width - 2}
backgroundColor={theme.backgroundPanel}
paddingTop={1}
@@ -62,7 +63,7 @@ function init() {
element: JSX.Element
onClose?: () => void
}[],
- size: "medium" as "medium" | "large",
+ size: "medium" as "medium" | "large" | "xlarge",
})
const renderer = useRenderer()
@@ -126,13 +127,26 @@ function init() {
},
])
},
+ push(input: any, onClose?: () => void) {
+ if (store.stack.length === 0) {
+ focus = renderer.currentFocusedRenderable
+ focus?.blur()
+ }
+ setStore("stack", [
+ ...store.stack,
+ {
+ element: input,
+ onClose,
+ },
+ ])
+ },
get stack() {
return store.stack
},
get size() {
return store.size
},
- setSize(size: "medium" | "large") {
+ setSize(size: "medium" | "large" | "xlarge") {
setStore("size", size)
},
}
diff --git a/packages/opencode/src/provider/transform.ts b/packages/opencode/src/provider/transform.ts
index 759dab440d40..ff4327e1b14f 100644
--- a/packages/opencode/src/provider/transform.ts
+++ b/packages/opencode/src/provider/transform.ts
@@ -2,6 +2,7 @@ import type { ModelMessage } from "ai"
import { mergeDeep, unique } from "remeda"
import type { JSONSchema7 } from "@ai-sdk/provider"
import type { JSONSchema } from "zod/v4/core"
+import path from "path"
import type { Provider } from "./provider"
import type { ModelsDev } from "./models"
import { iife } from "@/util/iife"
@@ -49,6 +50,35 @@ export namespace ProviderTransform {
model: Provider.Model,
    options: Record<string, any>,
): ModelMessage[] {
+ // Strip openai itemId metadata following what codex does
+ if (model.api.npm === "@ai-sdk/openai" || options.store === false) {
+ msgs = msgs.map((msg) => {
+ if (msg.providerOptions) {
+ for (const options of Object.values(msg.providerOptions)) {
+ if (options && typeof options === "object") {
+ delete options["itemId"]
+ delete options["reasoningEncryptedContent"]
+ }
+ }
+ }
+ if (!Array.isArray(msg.content)) {
+ return msg
+ }
+ const content = msg.content.map((part) => {
+ if (part.providerOptions) {
+ for (const options of Object.values(part.providerOptions)) {
+ if (options && typeof options === "object") {
+ delete options["itemId"]
+ delete options["reasoningEncryptedContent"]
+ }
+ }
+ }
+ return part
+ })
+ return { ...msg, content } as typeof msg
+ })
+ }
+
// Anthropic rejects messages with empty content - filter out empty string messages
// and remove empty text/reasoning parts from array content
if (model.api.npm === "@ai-sdk/anthropic") {
@@ -171,7 +201,34 @@ export namespace ProviderTransform {
return msgs
}
- function applyCaching(msgs: ModelMessage[], model: Provider.Model): ModelMessage[] {
+  async function applyCaching(msgs: ModelMessage[], model: Provider.Model, sessionID?: string): Promise<ModelMessage[]> {
+ // Skip caching if session cache was invalidated (e.g., message deletion)
+ if (sessionID) {
+ const { Global } = await import("@/global")
+ const { Session } = await import("../session")
+ const session = await Session.get(sessionID).catch(() => null)
+ if (session) {
+ const sessionPath = path.join(
+ Global.Path.data,
+ "storage",
+ "session",
+ `project_${session.projectID}`,
+ `${sessionID}.json`
+ )
+ try {
+ const sessionData = await Bun.file(sessionPath).json()
+ if (sessionData.cacheInvalidated) {
+ // Clear flag and return without cache control markers
+ delete sessionData.cacheInvalidated
+ await Bun.write(sessionPath, JSON.stringify(sessionData, null, 2))
+ return msgs
+ }
+ } catch {
+ // File doesn't exist or can't be read, continue with caching
+ }
+ }
+ }
+
const system = msgs.filter((msg) => msg.role === "system").slice(0, 2)
const final = msgs.filter((msg) => msg.role !== "system").slice(-2)
@@ -249,7 +306,12 @@ export namespace ProviderTransform {
})
}
-  export function message(msgs: ModelMessage[], model: Provider.Model, options: Record<string, any>) {
+ export async function message(
+ msgs: ModelMessage[],
+ model: Provider.Model,
+    options: Record<string, any> = {},
+ sessionID?: string,
+ ) {
msgs = unsupportedParts(msgs, model)
msgs = normalizeMessages(msgs, model, options)
if (
@@ -261,7 +323,7 @@ export namespace ProviderTransform {
model.api.npm === "@ai-sdk/anthropic") &&
model.api.npm !== "@ai-sdk/gateway"
) {
- msgs = applyCaching(msgs, model)
+ msgs = await applyCaching(msgs, model, sessionID)
}
// Remap providerOptions keys from stored providerID to expected SDK key
diff --git a/packages/opencode/src/session/index.ts b/packages/opencode/src/session/index.ts
index b07a049c80d7..7f9a5eaa0442 100644
--- a/packages/opencode/src/session/index.ts
+++ b/packages/opencode/src/session/index.ts
@@ -148,6 +148,7 @@ export namespace Session {
diff: z.string().optional(),
})
.optional(),
+ cacheInvalidated: z.boolean().optional(),
})
.meta({
ref: "Session",
diff --git a/packages/opencode/src/session/llm.ts b/packages/opencode/src/session/llm.ts
index fa880391276c..46e26e3a5ba7 100644
--- a/packages/opencode/src/session/llm.ts
+++ b/packages/opencode/src/session/llm.ts
@@ -242,7 +242,7 @@ export namespace LLM {
async transformParams(args) {
if (args.type === "stream") {
// @ts-expect-error
- args.params.prompt = ProviderTransform.message(args.params.prompt, input.model, options)
+          args.params.prompt = await ProviderTransform.message(args.params.prompt, input.model, options, input.sessionID)
}
return args.params
},
diff --git a/packages/opencode/src/util/token.ts b/packages/opencode/src/util/token.ts
index cee5adc37713..dcd8c4c97cc8 100644
--- a/packages/opencode/src/util/token.ts
+++ b/packages/opencode/src/util/token.ts
@@ -4,4 +4,10 @@ export namespace Token {
export function estimate(input: string) {
return Math.max(0, Math.round((input || "").length / CHARS_PER_TOKEN))
}
+
+ export function estimateImage(urlOrData: string): number {
+ // Estimate tokens for image data/URLs since providers don't return image token counts
+ // Uses string length as proxy: data URLs contain base64 image data, file paths are small
+ return Math.max(100, Math.round(urlOrData.length / 170))
+ }
}
diff --git a/packages/opencode/test/provider/transform.test.ts b/packages/opencode/test/provider/transform.test.ts
index 3494cb56fdd0..0735c8cfe6b1 100644
--- a/packages/opencode/test/provider/transform.test.ts
+++ b/packages/opencode/test/provider/transform.test.ts
@@ -621,7 +621,7 @@ describe("ProviderTransform.schema - gemini non-object properties removal", () =
})
describe("ProviderTransform.message - DeepSeek reasoning content", () => {
- test("DeepSeek with tool calls includes reasoning_content in providerOptions", () => {
+ test("DeepSeek with tool calls includes reasoning_content in providerOptions", async () => {
const msgs = [
{
role: "assistant",
@@ -637,7 +637,7 @@ describe("ProviderTransform.message - DeepSeek reasoning content", () => {
},
] as any[]
- const result = ProviderTransform.message(
+ const result = await ProviderTransform.message(
msgs,
{
id: "deepseek/deepseek-chat",
@@ -688,7 +688,7 @@ describe("ProviderTransform.message - DeepSeek reasoning content", () => {
expect(result[0].providerOptions?.openaiCompatible?.reasoning_content).toBe("Let me think about this...")
})
- test("Non-DeepSeek providers leave reasoning content unchanged", () => {
+ test("Non-DeepSeek providers leave reasoning content unchanged", async () => {
const msgs = [
{
role: "assistant",
@@ -699,7 +699,7 @@ describe("ProviderTransform.message - DeepSeek reasoning content", () => {
},
] as any[]
- const result = ProviderTransform.message(
+ const result = await ProviderTransform.message(
msgs,
{
id: "openai/gpt-4",
@@ -777,7 +777,7 @@ describe("ProviderTransform.message - empty image handling", () => {
headers: {},
} as any
- test("should replace empty base64 image with error text", () => {
+ test("should replace empty base64 image with error text", async () => {
const msgs = [
{
role: "user",
@@ -788,7 +788,7 @@ describe("ProviderTransform.message - empty image handling", () => {
},
] as any[]
- const result = ProviderTransform.message(msgs, mockModel, {})
+ const result = await ProviderTransform.message(msgs, mockModel, {})
expect(result).toHaveLength(1)
expect(result[0].content).toHaveLength(2)
@@ -799,7 +799,7 @@ describe("ProviderTransform.message - empty image handling", () => {
})
})
- test("should keep valid base64 images unchanged", () => {
+ test("should keep valid base64 images unchanged", async () => {
const validBase64 =
"iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg=="
const msgs = [
@@ -812,7 +812,7 @@ describe("ProviderTransform.message - empty image handling", () => {
},
] as any[]
- const result = ProviderTransform.message(msgs, mockModel, {})
+ const result = await ProviderTransform.message(msgs, mockModel, {})
expect(result).toHaveLength(1)
expect(result[0].content).toHaveLength(2)
@@ -820,7 +820,7 @@ describe("ProviderTransform.message - empty image handling", () => {
expect(result[0].content[1]).toEqual({ type: "image", image: `data:image/png;base64,${validBase64}` })
})
- test("should handle mixed valid and empty images", () => {
+ test("should handle mixed valid and empty images", async () => {
const validBase64 =
"iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg=="
const msgs = [
@@ -834,7 +834,7 @@ describe("ProviderTransform.message - empty image handling", () => {
},
] as any[]
- const result = ProviderTransform.message(msgs, mockModel, {})
+ const result = await ProviderTransform.message(msgs, mockModel, {})
expect(result).toHaveLength(1)
expect(result[0].content).toHaveLength(3)
@@ -880,21 +880,21 @@ describe("ProviderTransform.message - anthropic empty content filtering", () =>
headers: {},
} as any
- test("filters out messages with empty string content", () => {
+ test("filters out messages with empty string content", async () => {
const msgs = [
{ role: "user", content: "Hello" },
{ role: "assistant", content: "" },
{ role: "user", content: "World" },
] as any[]
- const result = ProviderTransform.message(msgs, anthropicModel, {})
+ const result = await ProviderTransform.message(msgs, anthropicModel, {})
expect(result).toHaveLength(2)
expect(result[0].content).toBe("Hello")
expect(result[1].content).toBe("World")
})
- test("filters out empty text parts from array content", () => {
+ test("filters out empty text parts from array content", async () => {
const msgs = [
{
role: "assistant",
@@ -906,14 +906,14 @@ describe("ProviderTransform.message - anthropic empty content filtering", () =>
},
] as any[]
- const result = ProviderTransform.message(msgs, anthropicModel, {})
+ const result = await ProviderTransform.message(msgs, anthropicModel, {})
expect(result).toHaveLength(1)
expect(result[0].content).toHaveLength(1)
expect(result[0].content[0]).toEqual({ type: "text", text: "Hello" })
})
- test("filters out empty reasoning parts from array content", () => {
+ test("filters out empty reasoning parts from array content", async () => {
const msgs = [
{
role: "assistant",
@@ -925,14 +925,14 @@ describe("ProviderTransform.message - anthropic empty content filtering", () =>
},
] as any[]
- const result = ProviderTransform.message(msgs, anthropicModel, {})
+ const result = await ProviderTransform.message(msgs, anthropicModel, {})
expect(result).toHaveLength(1)
expect(result[0].content).toHaveLength(1)
expect(result[0].content[0]).toEqual({ type: "text", text: "Answer" })
})
- test("removes entire message when all parts are empty", () => {
+ test("removes entire message when all parts are empty", async () => {
const msgs = [
{ role: "user", content: "Hello" },
{
@@ -945,14 +945,14 @@ describe("ProviderTransform.message - anthropic empty content filtering", () =>
{ role: "user", content: "World" },
] as any[]
- const result = ProviderTransform.message(msgs, anthropicModel, {})
+ const result = await ProviderTransform.message(msgs, anthropicModel, {})
expect(result).toHaveLength(2)
expect(result[0].content).toBe("Hello")
expect(result[1].content).toBe("World")
})
- test("keeps non-text/reasoning parts even if text parts are empty", () => {
+ test("keeps non-text/reasoning parts even if text parts are empty", async () => {
const msgs = [
{
role: "assistant",
@@ -963,7 +963,7 @@ describe("ProviderTransform.message - anthropic empty content filtering", () =>
},
] as any[]
- const result = ProviderTransform.message(msgs, anthropicModel, {})
+ const result = await ProviderTransform.message(msgs, anthropicModel, {})
expect(result).toHaveLength(1)
expect(result[0].content).toHaveLength(1)
@@ -975,7 +975,7 @@ describe("ProviderTransform.message - anthropic empty content filtering", () =>
})
})
- test("keeps messages with valid text alongside empty parts", () => {
+ test("keeps messages with valid text alongside empty parts", async () => {
const msgs = [
{
role: "assistant",
@@ -987,7 +987,7 @@ describe("ProviderTransform.message - anthropic empty content filtering", () =>
},
] as any[]
- const result = ProviderTransform.message(msgs, anthropicModel, {})
+ const result = await ProviderTransform.message(msgs, anthropicModel, {})
expect(result).toHaveLength(1)
expect(result[0].content).toHaveLength(2)
@@ -995,7 +995,7 @@ describe("ProviderTransform.message - anthropic empty content filtering", () =>
expect(result[0].content[1]).toEqual({ type: "text", text: "Result" })
})
- test("does not filter for non-anthropic providers", () => {
+ test("does not filter for non-anthropic providers", async () => {
const openaiModel = {
...anthropicModel,
providerID: "openai",
@@ -1014,7 +1014,7 @@ describe("ProviderTransform.message - anthropic empty content filtering", () =>
},
] as any[]
- const result = ProviderTransform.message(msgs, openaiModel, {})
+ const result = await ProviderTransform.message(msgs, openaiModel, {})
expect(result).toHaveLength(2)
expect(result[0].content).toBe("")
@@ -1048,7 +1048,7 @@ describe("ProviderTransform.message - strip openai metadata when store=false", (
headers: {},
} as any
- test("preserves itemId and reasoningEncryptedContent when store=false", () => {
+ test("strips itemId and reasoningEncryptedContent when store=false", async () => {
const msgs = [
{
role: "assistant",
@@ -1076,14 +1076,14 @@ describe("ProviderTransform.message - strip openai metadata when store=false", (
},
] as any[]
- const result = ProviderTransform.message(msgs, openaiModel, { store: false }) as any[]
+ const result = (await ProviderTransform.message(msgs, openaiModel, { store: false })) as any[]
expect(result).toHaveLength(1)
- expect(result[0].content[0].providerOptions?.openai?.itemId).toBe("rs_123")
- expect(result[0].content[1].providerOptions?.openai?.itemId).toBe("msg_456")
+ expect(result[0].content[0].providerOptions?.openai?.itemId).toBeUndefined()
+ expect(result[0].content[1].providerOptions?.openai?.itemId).toBeUndefined()
})
- test("preserves itemId and reasoningEncryptedContent when store=false even when not openai", () => {
+ test("strips itemId and reasoningEncryptedContent when store=false even when not openai", async () => {
const zenModel = {
...openaiModel,
providerID: "zen",
@@ -1115,14 +1115,14 @@ describe("ProviderTransform.message - strip openai metadata when store=false", (
},
] as any[]
- const result = ProviderTransform.message(msgs, zenModel, { store: false }) as any[]
+ const result = (await ProviderTransform.message(msgs, zenModel, { store: false })) as any[]
expect(result).toHaveLength(1)
- expect(result[0].content[0].providerOptions?.openai?.itemId).toBe("rs_123")
- expect(result[0].content[1].providerOptions?.openai?.itemId).toBe("msg_456")
+ expect(result[0].content[0].providerOptions?.openai?.itemId).toBeUndefined()
+ expect(result[0].content[1].providerOptions?.openai?.itemId).toBeUndefined()
})
- test("preserves other openai options including itemId", () => {
+ test("preserves other openai options when stripping itemId", async () => {
const msgs = [
{
role: "assistant",
@@ -1141,13 +1141,13 @@ describe("ProviderTransform.message - strip openai metadata when store=false", (
},
] as any[]
- const result = ProviderTransform.message(msgs, openaiModel, { store: false }) as any[]
+ const result = (await ProviderTransform.message(msgs, openaiModel, { store: false })) as any[]
- expect(result[0].content[0].providerOptions?.openai?.itemId).toBe("msg_123")
+ expect(result[0].content[0].providerOptions?.openai?.itemId).toBeUndefined()
expect(result[0].content[0].providerOptions?.openai?.otherOption).toBe("value")
})
- test("preserves metadata for openai package when store is true", () => {
+ test("strips metadata for openai package even when store is true", async () => {
const msgs = [
{
role: "assistant",
@@ -1165,13 +1165,13 @@ describe("ProviderTransform.message - strip openai metadata when store=false", (
},
] as any[]
- // openai package preserves itemId regardless of store value
- const result = ProviderTransform.message(msgs, openaiModel, { store: true }) as any[]
+ // openai package always strips itemId regardless of store value
+ const result = (await ProviderTransform.message(msgs, openaiModel, { store: true })) as any[]
- expect(result[0].content[0].providerOptions?.openai?.itemId).toBe("msg_123")
+ expect(result[0].content[0].providerOptions?.openai?.itemId).toBeUndefined()
})
- test("preserves metadata for non-openai packages when store is false", () => {
+ test("strips metadata for non-openai packages when store is false", async () => {
const anthropicModel = {
...openaiModel,
providerID: "anthropic",
@@ -1198,13 +1198,13 @@ describe("ProviderTransform.message - strip openai metadata when store=false", (
},
] as any[]
- // store=false preserves metadata for non-openai packages
- const result = ProviderTransform.message(msgs, anthropicModel, { store: false }) as any[]
+ // store=false triggers stripping even for non-openai packages
+ const result = (await ProviderTransform.message(msgs, anthropicModel, { store: false })) as any[]
- expect(result[0].content[0].providerOptions?.openai?.itemId).toBe("msg_123")
+ expect(result[0].content[0].providerOptions?.openai?.itemId).toBeUndefined()
})
- test("preserves metadata using providerID key when store is false", () => {
+ test("strips metadata using providerID key when store is false", async () => {
const opencodeModel = {
...openaiModel,
providerID: "opencode",
@@ -1232,13 +1232,13 @@ describe("ProviderTransform.message - strip openai metadata when store=false", (
},
] as any[]
- const result = ProviderTransform.message(msgs, opencodeModel, { store: false }) as any[]
+ const result = (await ProviderTransform.message(msgs, opencodeModel, { store: false })) as any[]
- expect(result[0].content[0].providerOptions?.opencode?.itemId).toBe("msg_123")
+ expect(result[0].content[0].providerOptions?.opencode?.itemId).toBeUndefined()
expect(result[0].content[0].providerOptions?.opencode?.otherOption).toBe("value")
})
- test("preserves itemId across all providerOptions keys", () => {
+ test("strips itemId across all providerOptions keys", async () => {
const opencodeModel = {
...openaiModel,
providerID: "opencode",
@@ -1270,17 +1270,17 @@ describe("ProviderTransform.message - strip openai metadata when store=false", (
},
] as any[]
- const result = ProviderTransform.message(msgs, opencodeModel, { store: false }) as any[]
+ const result = (await ProviderTransform.message(msgs, opencodeModel, { store: false })) as any[]
- expect(result[0].providerOptions?.openai?.itemId).toBe("msg_root")
- expect(result[0].providerOptions?.opencode?.itemId).toBe("msg_opencode")
- expect(result[0].providerOptions?.extra?.itemId).toBe("msg_extra")
- expect(result[0].content[0].providerOptions?.openai?.itemId).toBe("msg_openai_part")
- expect(result[0].content[0].providerOptions?.opencode?.itemId).toBe("msg_opencode_part")
- expect(result[0].content[0].providerOptions?.extra?.itemId).toBe("msg_extra_part")
+ expect(result[0].providerOptions?.openai?.itemId).toBeUndefined()
+ expect(result[0].providerOptions?.opencode?.itemId).toBeUndefined()
+ expect(result[0].providerOptions?.extra?.itemId).toBeUndefined()
+ expect(result[0].content[0].providerOptions?.openai?.itemId).toBeUndefined()
+ expect(result[0].content[0].providerOptions?.opencode?.itemId).toBeUndefined()
+ expect(result[0].content[0].providerOptions?.extra?.itemId).toBeUndefined()
})
- test("does not strip metadata for non-openai packages when store is not false", () => {
+ test("does not strip metadata for non-openai packages when store is not false", async () => {
const anthropicModel = {
...openaiModel,
providerID: "anthropic",
@@ -1307,7 +1307,7 @@ describe("ProviderTransform.message - strip openai metadata when store=false", (
},
] as any[]
- const result = ProviderTransform.message(msgs, anthropicModel, {}) as any[]
+ const result = (await ProviderTransform.message(msgs, anthropicModel, {})) as any[]
expect(result[0].content[0].providerOptions?.openai?.itemId).toBe("msg_123")
})
@@ -1340,7 +1340,7 @@ describe("ProviderTransform.message - providerOptions key remapping", () => {
headers: {},
}) as any
- test("azure keeps 'azure' key and does not remap to 'openai'", () => {
+ test("azure keeps 'azure' key and does not remap to 'openai'", async () => {
const model = createModel("azure", "@ai-sdk/azure")
const msgs = [
{
@@ -1352,13 +1352,13 @@ describe("ProviderTransform.message - providerOptions key remapping", () => {
},
] as any[]
- const result = ProviderTransform.message(msgs, model, {})
+ const result = await ProviderTransform.message(msgs, model, {})
expect(result[0].providerOptions?.azure).toEqual({ someOption: "value" })
expect(result[0].providerOptions?.openai).toBeUndefined()
})
- test("copilot remaps providerID to 'copilot' key", () => {
+ test("copilot remaps providerID to 'copilot' key", async () => {
const model = createModel("github-copilot", "@ai-sdk/github-copilot")
const msgs = [
{
@@ -1370,13 +1370,13 @@ describe("ProviderTransform.message - providerOptions key remapping", () => {
},
] as any[]
- const result = ProviderTransform.message(msgs, model, {})
+ const result = await ProviderTransform.message(msgs, model, {})
expect(result[0].providerOptions?.copilot).toEqual({ someOption: "value" })
expect(result[0].providerOptions?.["github-copilot"]).toBeUndefined()
})
- test("bedrock remaps providerID to 'bedrock' key", () => {
+ test("bedrock remaps providerID to 'bedrock' key", async () => {
const model = createModel("my-bedrock", "@ai-sdk/amazon-bedrock")
const msgs = [
{
@@ -1388,7 +1388,7 @@ describe("ProviderTransform.message - providerOptions key remapping", () => {
},
] as any[]
- const result = ProviderTransform.message(msgs, model, {})
+ const result = await ProviderTransform.message(msgs, model, {})
expect(result[0].providerOptions?.bedrock).toEqual({ someOption: "value" })
expect(result[0].providerOptions?.["my-bedrock"]).toBeUndefined()
@@ -1396,7 +1396,7 @@ describe("ProviderTransform.message - providerOptions key remapping", () => {
})
describe("ProviderTransform.message - claude w/bedrock custom inference profile", () => {
- test("adds cachePoint", () => {
+ test("adds cachePoint", async () => {
const model = {
id: "amazon-bedrock/custom-claude-sonnet-4.5",
providerID: "amazon-bedrock",
@@ -1418,7 +1418,7 @@ describe("ProviderTransform.message - claude w/bedrock custom inference profile"
},
] as any[]
- const result = ProviderTransform.message(msgs, model, {})
+ const result = await ProviderTransform.message(msgs, model, {})
expect(result[0].providerOptions?.bedrock).toEqual(
expect.objectContaining({
@@ -1458,7 +1458,7 @@ describe("ProviderTransform.message - cache control on gateway", () => {
...overrides,
}) as any
- test("gateway does not set cache control for anthropic models", () => {
+ test("gateway does not set cache control for anthropic models", async () => {
const model = createModel()
const msgs = [
{
@@ -1471,13 +1471,13 @@ describe("ProviderTransform.message - cache control on gateway", () => {
},
] as any[]
- const result = ProviderTransform.message(msgs, model, {}) as any[]
+ const result = (await ProviderTransform.message(msgs, model, {})) as any[]
expect(result[0].content[0].providerOptions).toBeUndefined()
expect(result[0].providerOptions).toBeUndefined()
})
- test("non-gateway anthropic keeps existing cache control behavior", () => {
+ test("non-gateway anthropic keeps existing cache control behavior", async () => {
const model = createModel({
providerID: "anthropic",
api: {
@@ -1497,7 +1497,7 @@ describe("ProviderTransform.message - cache control on gateway", () => {
},
] as any[]
- const result = ProviderTransform.message(msgs, model, {}) as any[]
+ const result = (await ProviderTransform.message(msgs, model, {})) as any[]
expect(result[0].providerOptions).toEqual({
anthropic: {
diff --git a/packages/sdk/js/src/v2/gen/types.gen.ts b/packages/sdk/js/src/v2/gen/types.gen.ts
index efb7e202e120..1b60ccec467d 100644
--- a/packages/sdk/js/src/v2/gen/types.gen.ts
+++ b/packages/sdk/js/src/v2/gen/types.gen.ts
@@ -834,6 +834,7 @@ export type Session = {
snapshot?: string
diff?: string
}
+ cacheInvalidated?: boolean
}
export type EventSessionCreated = {
diff --git a/packages/ui/src/components/message-part.tsx b/packages/ui/src/components/message-part.tsx
index 3a19bf7d2b89..aff97c7293fc 100644
--- a/packages/ui/src/components/message-part.tsx
+++ b/packages/ui/src/components/message-part.tsx
@@ -93,6 +93,7 @@ function DiagnosticsDisplay(props: { diagnostics: Diagnostic[] }): JSX.Element {
export interface MessageProps {
message: MessageType
parts: PartType[]
+ id?: string
showAssistantCopyPartID?: string | null
interrupted?: boolean
}
@@ -464,6 +465,7 @@ export function Message(props: MessageProps) {
)}
@@ -473,6 +475,7 @@ export function Message(props: MessageProps) {
)}
@@ -484,6 +487,7 @@ export function Message(props: MessageProps) {
export function AssistantMessageDisplay(props: {
message: AssistantMessage
parts: PartType[]
+ id?: string
showAssistantCopyPartID?: string | null
}) {
const grouped = createMemo(() => {
@@ -530,37 +534,39 @@ export function AssistantMessageDisplay(props: {
})
return (
-
- {(key) => {
- const item = createMemo(() => grouped().items[key])
- const ctx = createMemo(() => {
- const value = item()
- if (!value) return
- if (value.type !== "context") return
- return value
- })
- const part = createMemo(() => {
- const value = item()
- if (!value) return
- if (value.type !== "part") return
- return value
- })
- return (
- <>
- {(entry) => }
-
- {(entry) => (
-
- )}
-
- >
- )
- }}
-
+
+
+ {(key) => {
+ const item = createMemo(() => grouped().items[key])
+ const ctx = createMemo(() => {
+ const value = item()
+ if (!value) return
+ if (value.type !== "context") return
+ return value
+ })
+ const part = createMemo(() => {
+ const value = item()
+ if (!value) return
+ if (value.type !== "part") return
+ return value
+ })
+ return (
+ <>
+ {(entry) => }
+
+ {(entry) => (
+
+ )}
+
+ >
+ )
+ }}
+
+
)
}
@@ -642,7 +648,7 @@ function ContextToolGroup(props: { parts: ToolPart[]; busy?: boolean }) {
)
}
-export function UserMessageDisplay(props: { message: UserMessage; parts: PartType[]; interrupted?: boolean }) {
+export function UserMessageDisplay(props: { message: UserMessage; parts: PartType[]; id?: string; interrupted?: boolean }) {
const data = useData()
const dialog = useDialog()
const i18n = useI18n()
@@ -714,7 +720,7 @@ export function UserMessageDisplay(props: { message: UserMessage; parts: PartTyp
}
return (
-