From adc48c013811b226b5b4bac2b4b091538ebf39af Mon Sep 17 00:00:00 2001
From: Daniel Riccio
Date: Tue, 5 Aug 2025 13:03:46 -0500
Subject: [PATCH] feat: add GPT-OSS 120b and 20b models to Groq provider

---
 packages/types/src/providers/groq.ts | 22 ++++++++++++++++++++++
 1 file changed, 22 insertions(+)

diff --git a/packages/types/src/providers/groq.ts b/packages/types/src/providers/groq.ts
index 2eac1f954a3..cab0c699000 100644
--- a/packages/types/src/providers/groq.ts
+++ b/packages/types/src/providers/groq.ts
@@ -11,6 +11,8 @@ export type GroqModelId =
 	| "qwen/qwen3-32b"
 	| "deepseek-r1-distill-llama-70b"
 	| "moonshotai/kimi-k2-instruct"
+	| "openai/gpt-oss-120b"
+	| "openai/gpt-oss-20b"
 
 export const groqDefaultModelId: GroqModelId = "llama-3.3-70b-versatile" // Defaulting to Llama3 70B Versatile
 
@@ -97,4 +99,24 @@
 		outputPrice: 3.0,
 		description: "Moonshot AI Kimi K2 Instruct 1T model, 128K context.",
 	},
+	"openai/gpt-oss-120b": {
+		maxTokens: 32766,
+		contextWindow: 131072,
+		supportsImages: false,
+		supportsPromptCache: false,
+		inputPrice: 0.15,
+		outputPrice: 0.75,
+		description:
+			"GPT-OSS 120B is OpenAI's flagship open source model, built on a Mixture-of-Experts (MoE) architecture with 120 billion parameters and 128 experts.",
+	},
+	"openai/gpt-oss-20b": {
+		maxTokens: 32768,
+		contextWindow: 131072,
+		supportsImages: false,
+		supportsPromptCache: false,
+		inputPrice: 0.1,
+		outputPrice: 0.5,
+		description:
+			"GPT-OSS 20B is OpenAI's compact open source model, built on a Mixture-of-Experts (MoE) architecture with 20 billion parameters and 32 experts.",
+	},
 } as const satisfies Record<string, ModelInfo>
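
For context, a minimal sketch of how the new entries could be consumed downstream. It assumes the price fields follow the same convention as the existing Groq entries (USD per million tokens) and that `groqModels` and `GroqModelId` are exported from the types package as `@roo-code/types`; `estimateCostUSD` is a hypothetical helper for illustration, not part of this patch.

	import { groqModels, type GroqModelId } from "@roo-code/types"

	// Hypothetical helper: estimate request cost, assuming inputPrice and
	// outputPrice are USD per million tokens, as in the other Groq entries.
	function estimateCostUSD(modelId: GroqModelId, inputTokens: number, outputTokens: number): number {
		const info = groqModels[modelId]
		return (inputTokens / 1_000_000) * info.inputPrice + (outputTokens / 1_000_000) * info.outputPrice
	}

	// e.g. 10,000 prompt tokens + 2,000 completion tokens on the 120B model:
	// 0.01 * 0.15 + 0.002 * 0.75 = $0.003
	estimateCostUSD("openai/gpt-oss-120b", 10_000, 2_000)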