From 71356237ee73676841322e5c7f3fb518ed0dd1c9 Mon Sep 17 00:00:00 2001
From: Jason Holt
Date: Tue, 24 Feb 2026 09:59:31 +0100
Subject: [PATCH 1/2] fix(llm): pass default_query to OpenAI client

Neither custom_headers nor default_query reached the OpenAI legacy and
responses providers. Add a small helper that collects both into client
kwargs and pass it at the two construction sites. default_query lets a
provider pin query parameters, such as an Azure-style `api-version`,
on every request.
---
 src/kimi_cli/config.py        |  2 ++
 src/kimi_cli/llm.py           | 13 +++++++++++-
 tests/core/test_create_llm.py | 40 +++++++++++++++++++++++++++++++++++
 3 files changed, 54 insertions(+), 1 deletion(-)

diff --git a/src/kimi_cli/config.py b/src/kimi_cli/config.py
index b135958ca..0fa4675d8 100644
--- a/src/kimi_cli/config.py
+++ b/src/kimi_cli/config.py
@@ -44,6 +44,8 @@ class LLMProvider(BaseModel):
     """Environment variables to set before creating the provider instance"""
     custom_headers: dict[str, str] | None = None
     """Custom headers to include in API requests"""
+    default_query: dict[str, object] | None = None
+    """Default query parameters to include in API requests"""
     oauth: OAuthRef | None = None
     """OAuth credential reference (do not store tokens here)."""
 
diff --git a/src/kimi_cli/llm.py b/src/kimi_cli/llm.py
index a72644c05..8adc5336d 100644
--- a/src/kimi_cli/llm.py
+++ b/src/kimi_cli/llm.py
@@ -4,7 +4,7 @@
 import os
 from dataclasses import dataclass
 from pathlib import Path
-from typing import TYPE_CHECKING, Literal, cast, get_args
+from typing import TYPE_CHECKING, Any, Literal, cast, get_args
 
 from kosong.chat_provider import ChatProvider
 from pydantic import SecretStr
@@ -103,6 +103,15 @@ def _kimi_default_headers(provider: LLMProvider, oauth: OAuthManager | None) ->
     return headers
 
 
+def _openai_client_kwargs(provider: LLMProvider) -> dict[str, Any]:
+    client_kwargs: dict[str, Any] = {}
+    if provider.custom_headers:
+        client_kwargs["default_headers"] = provider.custom_headers
+    if provider.default_query:
+        client_kwargs["default_query"] = provider.default_query
+    return client_kwargs
+
+
 def create_llm(
     provider: LLMProvider,
     model: LLMModel,
@@ -152,6 +161,7 @@ def create_llm(
                 model=model.model,
                 base_url=provider.base_url,
                 api_key=resolved_api_key,
+                **_openai_client_kwargs(provider),
             )
         case "openai_responses":
             from kosong.contrib.chat_provider.openai_responses import OpenAIResponses
@@ -160,6 +170,7 @@ def create_llm(
                 model=model.model,
                 base_url=provider.base_url,
                 api_key=resolved_api_key,
+                **_openai_client_kwargs(provider),
             )
         case "anthropic":
             from kosong.contrib.chat_provider.anthropic import Anthropic
diff --git a/tests/core/test_create_llm.py b/tests/core/test_create_llm.py
index 27bb54d4f..7f9c124fc 100644
--- a/tests/core/test_create_llm.py
+++ b/tests/core/test_create_llm.py
@@ -3,6 +3,8 @@
 from inline_snapshot import snapshot
 from kosong.chat_provider.echo import EchoChatProvider
 from kosong.chat_provider.kimi import Kimi
+from kosong.contrib.chat_provider.openai_legacy import OpenAILegacy
+from kosong.contrib.chat_provider.openai_responses import OpenAIResponses
 from pydantic import SecretStr
 
 from kimi_cli.config import LLMModel, LLMProvider
@@ -93,3 +95,41 @@ def test_create_llm_requires_base_url_for_kimi():
     model = LLMModel(provider="kimi", model="kimi-base", max_context_size=4096)
 
     assert create_llm(provider, model) is None
+
+
+def test_create_llm_openai_legacy_passes_client_kwargs():
+    provider = LLMProvider(
+        type="openai_legacy",
+        base_url="https://openai.example/v1",
+        api_key=SecretStr("test-key"),
+        custom_headers={"x-test": "header"},
+        default_query={"api-version": "2024-05-01-preview"},
+    )
+    model = LLMModel(provider="openai", model="gpt-4o", max_context_size=4096)
+
+    llm = create_llm(provider, model)
+    assert llm is not None
+    assert isinstance(llm.chat_provider, OpenAILegacy)
+    assert llm.chat_provider._client_kwargs["default_headers"] == {"x-test": "header"}
+    assert llm.chat_provider._client_kwargs["default_query"] == {
+        "api-version": "2024-05-01-preview"
+    }
+
+
+def test_create_llm_openai_responses_passes_client_kwargs():
+    provider = LLMProvider(
+        type="openai_responses",
+        base_url="https://openai.example/v1",
+        api_key=SecretStr("test-key"),
+        custom_headers={"x-test": "header"},
+        default_query={"api-version": "2024-05-01-preview"},
+    )
+    model = LLMModel(provider="openai-responses", model="gpt-4o", max_context_size=4096)
+
+    llm = create_llm(provider, model)
+    assert llm is not None
+    assert isinstance(llm.chat_provider, OpenAIResponses)
+    assert llm.chat_provider._client_kwargs["default_headers"] == {"x-test": "header"}
+    assert llm.chat_provider._client_kwargs["default_query"] == {
+        "api-version": "2024-05-01-preview"
+    }

From 992663c27495ba1b9b0aedd49148082d47b449e1 Mon Sep 17 00:00:00 2001
From: Jason Holt
Date: Tue, 24 Feb 2026 10:28:16 +0100
Subject: [PATCH 2/2] fix(config): restrict default_query values

Query string values must be scalars; narrow the value type from
`object` accordingly.
---
 src/kimi_cli/config.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/kimi_cli/config.py b/src/kimi_cli/config.py
index 0fa4675d8..98dd4e765 100644
--- a/src/kimi_cli/config.py
+++ b/src/kimi_cli/config.py
@@ -44,8 +44,8 @@ class LLMProvider(BaseModel):
     """Environment variables to set before creating the provider instance"""
     custom_headers: dict[str, str] | None = None
     """Custom headers to include in API requests"""
-    default_query: dict[str, object] | None = None
-    """Default query parameters to include in API requests"""
+    default_query: dict[str, str | int | float | bool] | None = None
+    """Default query parameters to include in API requests (scalar values only)."""
     oauth: OAuthRef | None = None
     """OAuth credential reference (do not store tokens here)."""
 
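
Usage sketch (illustrative only, not part of the series): with both
patches applied, a provider that needs a fixed query parameter on every
request can be configured the same way the tests above do. The base URL
and API key below are placeholders.

    from pydantic import SecretStr

    from kimi_cli.config import LLMModel, LLMProvider
    from kimi_cli.llm import create_llm

    # Every request issued through this provider should carry
    # ?api-version=2024-05-01-preview in its query string.
    provider = LLMProvider(
        type="openai_legacy",
        base_url="https://my-gateway.example/v1",
        api_key=SecretStr("sk-placeholder"),
        default_query={"api-version": "2024-05-01-preview"},
    )
    model = LLMModel(provider="openai", model="gpt-4o", max_context_size=4096)

    # create_llm forwards default_query (and custom_headers, if set) to
    # the underlying OpenAI client via _openai_client_kwargs.
    llm = create_llm(provider, model)
    assert llm is not None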