diff --git a/src/strands/models/openai.py b/src/strands/models/openai.py
index 6e32c5bdd..764cb8519 100644
--- a/src/strands/models/openai.py
+++ b/src/strands/models/openai.py
@@ -18,6 +18,7 @@ class Client(Protocol):
     """Protocol defining the OpenAI-compatible interface for the underlying provider client."""
 
     @property
+    # pragma: no cover
     def chat(self) -> Any:
         """Chat completions interface."""
         ...
diff --git a/src/strands/types/models/openai.py b/src/strands/types/models/openai.py
index 8f5ffab32..c00a7774b 100644
--- a/src/strands/types/models/openai.py
+++ b/src/strands/types/models/openai.py
@@ -206,7 +206,9 @@
 
         case "content_delta":
             if event["data_type"] == "tool":
-                return {"contentBlockDelta": {"delta": {"toolUse": {"input": event["data"].function.arguments}}}}
+                return {
+                    "contentBlockDelta": {"delta": {"toolUse": {"input": event["data"].function.arguments or ""}}}
+                }
 
             return {"contentBlockDelta": {"delta": {"text": event["data"]}}}
 
diff --git a/tests/strands/types/models/test_openai.py b/tests/strands/types/models/test_openai.py
index 97a0882ab..2657c334e 100644
--- a/tests/strands/types/models/test_openai.py
+++ b/tests/strands/types/models/test_openai.py
@@ -246,12 +246,12 @@ def test_format_request(model, messages, tool_specs, system_prompt):
 @pytest.mark.parametrize(
     ("event", "exp_chunk"),
     [
-        # Case 1: Message start
+        # Message start
         (
             {"chunk_type": "message_start"},
             {"messageStart": {"role": "assistant"}},
         ),
-        # Case 2: Content Start - Tool Use
+        # Content Start - Tool Use
         (
             {
                 "chunk_type": "content_start",
@@ -260,12 +260,12 @@ def test_format_request(model, messages, tool_specs, system_prompt):
             },
             {"contentBlockStart": {"start": {"toolUse": {"name": "calculator", "toolUseId": "c1"}}}},
         ),
-        # Case 3: Content Start - Text
+        # Content Start - Text
         (
             {"chunk_type": "content_start", "data_type": "text"},
             {"contentBlockStart": {"start": {}}},
         ),
-        # Case 4: Content Delta - Tool Use
+        # Content Delta - Tool Use
         (
             {
                 "chunk_type": "content_delta",
@@ -274,32 +274,41 @@ def test_format_request(model, messages, tool_specs, system_prompt):
             },
             {"contentBlockDelta": {"delta": {"toolUse": {"input": '{"expression": "2+2"}'}}}},
         ),
-        # Case 5: Content Delta - Text
+        # Content Delta - Tool Use - None
+        (
+            {
+                "chunk_type": "content_delta",
+                "data_type": "tool",
+                "data": unittest.mock.Mock(function=unittest.mock.Mock(arguments=None)),
+            },
+            {"contentBlockDelta": {"delta": {"toolUse": {"input": ""}}}},
+        ),
+        # Content Delta - Text
         (
             {"chunk_type": "content_delta", "data_type": "text", "data": "hello"},
             {"contentBlockDelta": {"delta": {"text": "hello"}}},
         ),
-        # Case 6: Content Stop
+        # Content Stop
         (
             {"chunk_type": "content_stop"},
             {"contentBlockStop": {}},
         ),
-        # Case 7: Message Stop - Tool Use
+        # Message Stop - Tool Use
         (
             {"chunk_type": "message_stop", "data": "tool_calls"},
             {"messageStop": {"stopReason": "tool_use"}},
         ),
-        # Case 8: Message Stop - Max Tokens
+        # Message Stop - Max Tokens
         (
             {"chunk_type": "message_stop", "data": "length"},
             {"messageStop": {"stopReason": "max_tokens"}},
         ),
-        # Case 9: Message Stop - End Turn
+        # Message Stop - End Turn
         (
             {"chunk_type": "message_stop", "data": "stop"},
             {"messageStop": {"stopReason": "end_turn"}},
         ),
-        # Case 10: Metadata
+        # Metadata
         (
             {
                 "chunk_type": "metadata",