diff --git a/packages/app/src/custom-elements.d.ts b/packages/app/src/custom-elements.d.ts index e4ea0d6cebda..49ec4449fa20 120000 --- a/packages/app/src/custom-elements.d.ts +++ b/packages/app/src/custom-elements.d.ts @@ -1 +1,17 @@ -../../ui/src/custom-elements.d.ts \ No newline at end of file +import { DIFFS_TAG_NAME } from "@pierre/diffs" + +/** + * TypeScript declaration for the custom element registered under DIFFS_TAG_NAME. + * This tells TypeScript that the tag is a valid JSX element in SolidJS. + * Required for using the @pierre/diffs web component in .tsx files. + */ + +declare module "solid-js" { + namespace JSX { + interface IntrinsicElements { + [DIFFS_TAG_NAME]: HTMLAttributes<HTMLElement> + } + } +} + +export {} diff --git a/packages/enterprise/src/custom-elements.d.ts b/packages/enterprise/src/custom-elements.d.ts index e4ea0d6cebda..49ec4449fa20 120000 --- a/packages/enterprise/src/custom-elements.d.ts +++ b/packages/enterprise/src/custom-elements.d.ts @@ -1 +1,17 @@ -../../ui/src/custom-elements.d.ts \ No newline at end of file +import { DIFFS_TAG_NAME } from "@pierre/diffs" + +/** + * TypeScript declaration for the custom element registered under DIFFS_TAG_NAME. + * This tells TypeScript that the tag is a valid JSX element in SolidJS. + * Required for using the @pierre/diffs web component in .tsx files. 
+ */ + +declare module "solid-js" { + namespace JSX { + interface IntrinsicElements { + [DIFFS_TAG_NAME]: HTMLAttributes<HTMLElement> + } + } +} + +export {} diff --git a/packages/opencode/src/provider/provider.ts b/packages/opencode/src/provider/provider.ts index 7fb3166284be..4153a6066e97 100644 --- a/packages/opencode/src/provider/provider.ts +++ b/packages/opencode/src/provider/provider.ts @@ -136,6 +136,7 @@ export namespace Provider { "@ai-sdk/vercel": createVercel, "gitlab-ai-provider": createGitLab, "@ai-sdk/github-copilot": createGitHubCopilotOpenAICompatible, + lmstudio: createOpenAICompatible, } type CustomModelLoader = (sdk: any, modelID: string, options?: Record<string, any>) => Promise<any> @@ -154,6 +155,76 @@ export namespace Provider { } const CUSTOM_LOADERS: Record<string, any> = { + async lmstudio() { + const baseURL = Env.get("LM_STUDIO_URL") || "http://127.0.0.1:1234/v1" + return { + autoload: true, + options: { baseURL, apiKey: "lm-studio" }, + async discoverModels() { + try { + const res = await fetch(`${baseURL}/models`) + if (!res.ok) return {} + const data = (await res.json()) as any + const models: Record<string, any> = {} + for (const m of data.data || []) { + if (m.id.includes("embedding")) continue // skip embeddings + + const prettyName = m.id.split("/").pop() || m.id + + models[`lmstudio/${m.id}`] = { + id: m.id, + name: `LM Studio: ${prettyName}`, + providerID: "lmstudio", + family: "lmstudio-local", + api: { + id: m.id, + url: baseURL, + npm: "@ai-sdk/openai-compatible", + }, + status: "active", + headers: {}, + options: {}, + cost: { + input: 0, + output: 0, + cache: { read: 0, write: 0 }, + }, + limit: { + context: 32000, + output: 4096, + }, + capabilities: { + temperature: true, + reasoning: false, + attachment: false, + toolcall: true, + interleaved: false, + input: { + text: true, + audio: false, + image: false, + video: false, + pdf: false, + }, + output: { + text: true, + audio: false, + image: false, + video: false, + pdf: false, + }, + }, + release_date: "2025-01-01", + 
variants: {}, + } + } + return models + } catch (e) { + return {} // Return empty if LM Studio is not currently running + } + }, + } + }, async anthropic() { return { autoload: false,