Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 14 additions & 6 deletions lib/crewai/src/crewai/llms/providers/anthropic/completion.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,10 +11,14 @@
from crewai.llms.base_llm import BaseLLM, JsonResponseFormat, llm_call_context
from crewai.llms.hooks.base import BaseInterceptor
from crewai.llms.hooks.transport import AsyncHTTPTransport, HTTPTransport
from crewai.llms.providers.utils.common import safe_tool_conversion
from crewai.utilities.agent_utils import is_context_length_exceeded
from crewai.utilities.exceptions.context_window_exceeding_exception import (
LLMContextLengthExceededError,
)
from crewai.utilities.pydantic_schema_utils import (
sanitize_tool_params_for_anthropic_strict,
)
from crewai.utilities.types import LLMMessage


Expand Down Expand Up @@ -473,10 +477,8 @@ def _convert_tools_for_interference(
continue

try:
from crewai.llms.providers.utils.common import safe_tool_conversion

name, description, parameters = safe_tool_conversion(tool, "Anthropic")
except (ImportError, KeyError, ValueError) as e:
except (KeyError, ValueError) as e:
logging.error(f"Error converting tool to Anthropic format: {e}")
raise e

Expand All @@ -485,17 +487,23 @@ def _convert_tools_for_interference(
"description": description,
}

func_info = tool.get("function", {})
strict_enabled = bool(func_info.get("strict"))

if parameters and isinstance(parameters, dict):
anthropic_tool["input_schema"] = parameters
anthropic_tool["input_schema"] = (
sanitize_tool_params_for_anthropic_strict(parameters)
if strict_enabled
else parameters
)
else:
anthropic_tool["input_schema"] = {
"type": "object",
"properties": {},
"required": [],
}

func_info = tool.get("function", {})
if func_info.get("strict"):
if strict_enabled:
anthropic_tool["strict"] = True

anthropic_tools.append(anthropic_tool)
Expand Down
21 changes: 15 additions & 6 deletions lib/crewai/src/crewai/llms/providers/bedrock/completion.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,11 +12,15 @@

from crewai.events.types.llm_events import LLMCallType
from crewai.llms.base_llm import BaseLLM, llm_call_context
from crewai.llms.providers.utils.common import safe_tool_conversion
from crewai.utilities.agent_utils import is_context_length_exceeded
from crewai.utilities.exceptions.context_window_exceeding_exception import (
LLMContextLengthExceededError,
)
from crewai.utilities.pydantic_schema_utils import generate_model_description
from crewai.utilities.pydantic_schema_utils import (
generate_model_description,
sanitize_tool_params_for_bedrock_strict,
)
from crewai.utilities.types import LLMMessage


Expand Down Expand Up @@ -1949,8 +1953,6 @@ def _format_tools_for_converse(
tools: list[dict[str, Any]],
) -> list[ConverseToolTypeDef]:
"""Convert CrewAI tools to Converse API format following AWS specification."""
from crewai.llms.providers.utils.common import safe_tool_conversion

converse_tools: list[ConverseToolTypeDef] = []

for tool in tools:
Expand All @@ -1962,12 +1964,19 @@ def _format_tools_for_converse(
"description": description,
}

func_info = tool.get("function", {})
strict_enabled = bool(func_info.get("strict"))

if parameters and isinstance(parameters, dict):
input_schema: ToolInputSchema = {"json": parameters}
schema_params = (
sanitize_tool_params_for_bedrock_strict(parameters)
if strict_enabled
else parameters
)
input_schema: ToolInputSchema = {"json": schema_params}
tool_spec["inputSchema"] = input_schema

func_info = tool.get("function", {})
if func_info.get("strict"):
if strict_enabled:
tool_spec["strict"] = True

converse_tool: ConverseToolTypeDef = {"toolSpec": tool_spec}
Expand Down
18 changes: 8 additions & 10 deletions lib/crewai/src/crewai/llms/providers/openai/completion.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,11 +32,15 @@
from crewai.llms.base_llm import BaseLLM, JsonResponseFormat, llm_call_context
from crewai.llms.hooks.base import BaseInterceptor
from crewai.llms.hooks.transport import AsyncHTTPTransport, HTTPTransport
from crewai.llms.providers.utils.common import safe_tool_conversion
from crewai.utilities.agent_utils import is_context_length_exceeded
from crewai.utilities.exceptions.context_window_exceeding_exception import (
LLMContextLengthExceededError,
)
from crewai.utilities.pydantic_schema_utils import generate_model_description
from crewai.utilities.pydantic_schema_utils import (
generate_model_description,
sanitize_tool_params_for_openai_strict,
)
from crewai.utilities.types import LLMMessage


Expand Down Expand Up @@ -764,8 +768,6 @@ def _convert_tools_for_responses(
"function": {"name": "...", "description": "...", "parameters": {...}}
}
"""
from crewai.llms.providers.utils.common import safe_tool_conversion

responses_tools = []

for tool in tools:
Expand Down Expand Up @@ -1548,11 +1550,6 @@ def _convert_tools_for_interference(
self, tools: list[dict[str, BaseTool]]
) -> list[dict[str, Any]]:
"""Convert CrewAI tool format to OpenAI function calling format."""
from crewai.llms.providers.utils.common import safe_tool_conversion
from crewai.utilities.pydantic_schema_utils import (
force_additional_properties_false,
)

openai_tools = []

for tool in tools:
Expand All @@ -1571,8 +1568,9 @@ def _convert_tools_for_interference(
params_dict = (
parameters if isinstance(parameters, dict) else dict(parameters)
)
params_dict = force_additional_properties_false(params_dict)
openai_tool["function"]["parameters"] = params_dict
openai_tool["function"]["parameters"] = (
sanitize_tool_params_for_openai_strict(params_dict)
)

openai_tools.append(openai_tool)
return openai_tools
Expand Down
115 changes: 114 additions & 1 deletion lib/crewai/src/crewai/utilities/pydantic_schema_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
from copy import deepcopy
import datetime
import logging
from typing import TYPE_CHECKING, Annotated, Any, Final, Literal, TypedDict, Union
from typing import TYPE_CHECKING, Annotated, Any, Final, Literal, TypedDict, Union, cast
import uuid

import jsonref # type: ignore[import-untyped]
Expand Down Expand Up @@ -417,6 +417,119 @@ def strip_null_from_types(schema: dict[str, Any]) -> dict[str, Any]:
return schema


# Annotation-only JSON Schema keywords (no validation effect); stripped from
# every schema by _common_strict_pipeline before strict tool calling.
_STRICT_METADATA_KEYS: Final[tuple[str, ...]] = (
    "title",
    "default",
    "examples",
    "example",
    "$comment",
    "readOnly",
    "writeOnly",
    "deprecated",
)

# Validation keywords removed by sanitize_tool_params_for_anthropic_strict:
# numeric bounds, string/array/object size limits, and dependent-schema
# constructs that Anthropic's strict tool-use schemas do not accept.
_CLAUDE_STRICT_UNSUPPORTED: Final[tuple[str, ...]] = (
    "minimum",
    "maximum",
    "exclusiveMinimum",
    "exclusiveMaximum",
    "multipleOf",
    "minLength",
    "maxLength",
    "pattern",
    "minItems",
    "maxItems",
    "uniqueItems",
    "minContains",
    "maxContains",
    "minProperties",
    "maxProperties",
    "patternProperties",
    "propertyNames",
    "dependentRequired",
    "dependentSchemas",
)


def _strip_keys_recursive(d: Any, keys: tuple[str, ...]) -> Any:
"""Recursively delete a fixed set of keys from a schema."""
if isinstance(d, dict):
for key in keys:
d.pop(key, None)
for v in d.values():
_strip_keys_recursive(v, keys)
elif isinstance(d, list):
for i in d:
_strip_keys_recursive(i, keys)
return d


def lift_top_level_anyof(schema: dict[str, Any]) -> dict[str, Any]:
    """Unwrap a top-level anyOf/oneOf/allOf wrapping a single object variant.

    Anthropic's strict ``input_schema`` rejects top-level union keywords. When
    exactly one variant is an object schema, lift it so the root is a plain
    object; otherwise leave the schema alone.
    """
    for union_key in ("anyOf", "oneOf", "allOf"):
        candidates = schema.get(union_key)
        if not isinstance(candidates, list):
            continue
        objects = [
            c
            for c in candidates
            if isinstance(c, dict) and c.get("type") == "object"
        ]
        if len(objects) != 1:
            continue
        # Copy before merging so the lifted variant never aliases the root.
        del schema[union_key]
        schema.update(deepcopy(objects[0]))
        return schema
    return schema


def _common_strict_pipeline(params: dict[str, Any]) -> dict[str, Any]:
    """Shared strict sanitization: inline refs, close objects, require all properties."""
    # Work on a deep copy so the caller's tool definition is never mutated.
    working = resolve_refs(deepcopy(params))
    working.pop("$defs", None)
    for transform in (
        convert_oneof_to_anyof,
        ensure_type_in_schemas,
        force_additional_properties_false,
        ensure_all_properties_required,
    ):
        working = transform(working)
    return cast(
        dict[str, Any], _strip_keys_recursive(working, _STRICT_METADATA_KEYS)
    )


def sanitize_tool_params_for_openai_strict(
    params: dict[str, Any],
) -> dict[str, Any]:
    """Sanitize a JSON schema for OpenAI strict function calling."""
    if not isinstance(params, dict):
        # Nothing sanitizable; hand back the value untouched.
        return params
    sanitized = _common_strict_pipeline(params)
    return cast(dict[str, Any], strip_unsupported_formats(sanitized))


def sanitize_tool_params_for_anthropic_strict(
    params: dict[str, Any],
) -> dict[str, Any]:
    """Sanitize a JSON schema for Anthropic strict tool use."""
    if not isinstance(params, dict):
        # Nothing sanitizable; hand back the value untouched.
        return params
    cleaned = _common_strict_pipeline(params)
    cleaned = lift_top_level_anyof(cleaned)
    cleaned = _strip_keys_recursive(cleaned, _CLAUDE_STRICT_UNSUPPORTED)
    return cast(dict[str, Any], strip_unsupported_formats(cleaned))


def sanitize_tool_params_for_bedrock_strict(
    params: dict[str, Any],
) -> dict[str, Any]:
    """Sanitize a JSON schema for Bedrock Converse strict tool use.

    Bedrock Converse runs the same grammar compiler as the underlying Claude
    model, so Anthropic's strict-schema constraints apply unchanged; delegate
    to the Anthropic sanitizer rather than duplicating the pipeline.
    """
    return sanitize_tool_params_for_anthropic_strict(params)


def generate_model_description(
model: type[BaseModel],
*,
Expand Down
Loading