Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 3 additions & 1 deletion python/packages/foundry/agent_framework_foundry/_agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -198,7 +198,9 @@ def __init__(
self._should_close_client = True

# Get OpenAI client from project
async_client = self.project_client.get_openai_client()
async_client = self.project_client.get_openai_client(
default_headers=dict(default_headers) if default_headers else None,
)

super().__init__(
async_client=async_client,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -206,7 +206,9 @@ def __init__(

super().__init__(
model=resolved_model,
async_client=project_client.get_openai_client(),
async_client=project_client.get_openai_client(
default_headers=dict(default_headers) if default_headers else None,
),
default_headers=default_headers,
instruction_role=instruction_role,
compaction_strategy=compaction_strategy,
Expand Down
29 changes: 29 additions & 0 deletions python/packages/foundry/tests/foundry/test_foundry_agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,35 @@ def test_raw_foundry_agent_chat_client_init_uses_explicit_parameters() -> None:
assert all(parameter.kind != inspect.Parameter.VAR_KEYWORD for parameter in signature.parameters.values())


def test_raw_foundry_agent_chat_client_init_forwards_default_headers_to_openai_client() -> None:
    """Custom default_headers supplied at construction must reach get_openai_client(),
    so the wrapped AsyncOpenAI client attaches them to every outbound request."""
    headers = {"x-custom-header": "test-value"}
    project = MagicMock()
    project.get_openai_client.return_value = MagicMock()

    RawFoundryAgentChatClient(
        project_client=project,
        agent_name="test-agent",
        default_headers=headers,
    )

    # The headers dict must be forwarded verbatim, exactly once.
    project.get_openai_client.assert_called_once_with(default_headers=headers)


def test_raw_foundry_agent_chat_client_init_without_default_headers_passes_none_to_openai_client() -> None:
    """Omitting default_headers must translate to an explicit None in get_openai_client()."""
    project = MagicMock()
    project.get_openai_client.return_value = MagicMock()

    RawFoundryAgentChatClient(
        project_client=project,
        agent_name="test-agent",
    )

    project.get_openai_client.assert_called_once_with(default_headers=None)


def test_raw_foundry_agent_chat_client_get_agent_reference_with_version() -> None:
"""Test agent reference includes version when provided."""

Expand Down
31 changes: 31 additions & 0 deletions python/packages/foundry/tests/foundry/test_foundry_chat_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -180,6 +180,37 @@ def test_init_with_default_header() -> None:
assert client.default_headers[key] == value


def test_init_forwards_default_headers_to_openai_client() -> None:
    """Custom default_headers given to FoundryChatClient must be handed to
    get_openai_client() so the underlying AsyncOpenAI client sends them on
    every outbound request."""
    headers = {"x-custom-header": "test-value"}
    openai_client = _make_mock_openai_client()
    project = MagicMock()
    project.get_openai_client.return_value = openai_client

    FoundryChatClient(
        project_client=project,
        model=_TEST_FOUNDRY_MODEL,
        default_headers=headers,
    )

    project.get_openai_client.assert_called_once_with(default_headers=headers)


def test_init_without_default_headers_passes_none_to_openai_client() -> None:
    """With no default_headers argument, get_openai_client() must receive None."""
    openai_client = _make_mock_openai_client()
    project = MagicMock()
    project.get_openai_client.return_value = openai_client

    FoundryChatClient(
        project_client=project,
        model=_TEST_FOUNDRY_MODEL,
    )

    project.get_openai_client.assert_called_once_with(default_headers=None)


def test_init_with_project_endpoint_creates_project_client() -> None:
credential = MagicMock()
mock_openai_client = _make_mock_openai_client()
Expand Down
4 changes: 4 additions & 0 deletions python/packages/openai/agent_framework_openai/_shared.py
Original file line number Diff line number Diff line change
Expand Up @@ -199,6 +199,8 @@ def load_openai_service_settings(
if resolved_model := _resolve_named_setting(openai_settings, openai_model_fields):
openai_settings["model"] = resolved_model
if client:
if merged_headers:
client = client.with_options(default_headers=merged_headers)
return openai_settings, client, False # type: ignore[return-value]
if openai_settings.get("api_key") is not None or api_key_callable is not None:
resolved_model = _resolve_named_setting(openai_settings, openai_model_fields)
Expand Down Expand Up @@ -260,6 +262,8 @@ def load_openai_service_settings(
f"or the {deployment_env_guidance} environment variable."
)
if client:
if merged_headers:
client = client.with_options(default_headers=merged_headers)
return azure_settings, client, True # type: ignore[return-value]
client_args["default_headers"] = merged_headers
if endpoint := azure_settings.get("endpoint"):
Expand Down
64 changes: 64 additions & 0 deletions python/packages/openai/tests/openai/test_openai_shared.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
AZURE_OPENAI_TOKEN_SCOPE,
_ensure_async_token_provider,
_resolve_azure_credential_to_token_provider,
load_openai_service_settings,
)


Expand Down Expand Up @@ -76,3 +77,66 @@ async def async_provider() -> str:
result = await wrapper()

assert result == "async-token"


def test_load_openai_service_settings_applies_default_headers_to_prebuilt_client() -> None:
    """A pre-built client must have default_headers applied via with_options().

    Historically load_openai_service_settings early-returned the pre-built
    client untouched, silently dropping any custom headers the caller passed.
    """
    original_client = MagicMock()
    reconfigured_client = MagicMock()
    original_client.with_options.return_value = reconfigured_client

    _, returned_client, _ = load_openai_service_settings(
        model="gpt-4o",
        api_key=None,
        credential=None,
        org_id=None,
        base_url=None,
        endpoint=None,
        api_version=None,
        default_azure_api_version="2024-05-01-preview",
        default_headers={"x-custom-header": "test-value"},
        client=original_client,
        env_file_path=None,
        env_file_encoding=None,
    )

    # with_options must run exactly once and carry the caller's custom header.
    original_client.with_options.assert_called_once()
    forwarded = original_client.with_options.call_args.kwargs.get("default_headers", {})
    assert forwarded.get("x-custom-header") == "test-value"
    # The client handed back must be the reconfigured one, not the original.
    assert returned_client is reconfigured_client


def test_load_openai_service_settings_no_headers_still_applies_app_info() -> None:
    """With default_headers=None, APP_INFO telemetry headers must still be
    injected into the pre-built client through with_options()."""
    original_client = MagicMock()
    reconfigured_client = MagicMock()
    original_client.with_options.return_value = reconfigured_client

    with (
        patch("agent_framework_openai._shared.APP_INFO", {"agent-framework-version": "python/test-version"}),
        patch("agent_framework._telemetry.AGENT_FRAMEWORK_USER_AGENT", "agent-framework-python/test-version"),
    ):
        _, returned_client, _ = load_openai_service_settings(
            model="gpt-4o",
            api_key=None,
            credential=None,
            org_id=None,
            base_url=None,
            endpoint=None,
            api_version=None,
            default_azure_api_version="2024-05-01-preview",
            default_headers=None,
            client=original_client,
            env_file_path=None,
            env_file_encoding=None,
        )

    # Even without caller headers, the telemetry User-Agent must be applied.
    original_client.with_options.assert_called_once()
    forwarded = original_client.with_options.call_args.kwargs.get("default_headers", {})
    assert forwarded.get("User-Agent") == "agent-framework-python/test-version"
    assert returned_client is reconfigured_client
Comment thread
moonbox3 marked this conversation as resolved.
Loading