Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions sdk/ai/azure-ai-agents/CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,11 +8,14 @@
### Features Added

- Add `RunStepDetailsActivity`, describing MCP function parameters.
- Add `RunStepDeltaCustomBingGroundingToolCall`, describing `BingCustomSearchTool` updates in streaming scenario.

### Bugs Fixed

### Sample updates

- Bing Grounding and Bing Custom Search samples were fixed to correctly present references.

## 1.2.0b2 (2025-08-12)

### Features Added
Expand Down
6 changes: 2 additions & 4 deletions sdk/ai/azure-ai-agents/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -369,7 +369,7 @@ Here is an example:
<!-- SNIPPET:sample_agents_bing_grounding.create_agent_with_bing_grounding_tool -->

```python
conn_id = os.environ["AZURE_BING_CONNECTION_ID"]
conn_id = project_client.connections.get(os.environ["BING_CONNECTION_NAME"]).id

# Initialize agent bing tool and add the connection id
bing = BingGroundingTool(connection_id=conn_id)
Expand Down Expand Up @@ -474,9 +474,7 @@ The tool approval flow looks like this:
# Create and process agent run in thread with MCP tools
mcp_tool.update_headers("SuperSecret", "123456")
# mcp_tool.set_approval_mode("never") # Uncomment to disable approval requirement
run = agents_client.runs.create(
thread_id=thread.id, agent_id=agent.id, tool_resources=mcp_tool.resources
)
run = agents_client.runs.create(thread_id=thread.id, agent_id=agent.id, tool_resources=mcp_tool.resources)
print(f"Created run, ID: {run.id}")

while run.status in ["queued", "in_progress", "requires_action"]:
Expand Down
1 change: 1 addition & 0 deletions sdk/ai/azure-ai-agents/apiview-properties.json
Original file line number Diff line number Diff line change
Expand Up @@ -136,6 +136,7 @@
"azure.ai.agents.models.RunStepDeltaCodeInterpreterLogOutput": "Azure.AI.Agents.RunStepDeltaCodeInterpreterLogOutput",
"azure.ai.agents.models.RunStepDeltaCodeInterpreterToolCall": "Azure.AI.Agents.RunStepDeltaCodeInterpreterToolCall",
"azure.ai.agents.models.RunStepDeltaConnectedAgentToolCall": "Azure.AI.Agents.RunStepDeltaConnectedAgentToolCall",
"azure.ai.agents.models.RunStepDeltaCustomBingGroundingToolCall": "Azure.AI.Agents.RunStepDeltaCustomBingGroundingToolCall",
"azure.ai.agents.models.RunStepDeltaDeepResearchToolCall": "Azure.AI.Agents.RunStepDeltaDeepResearchToolCall",
"azure.ai.agents.models.RunStepDeltaDetail": "Azure.AI.Agents.RunStepDeltaDetail",
"azure.ai.agents.models.RunStepDeltaFileSearchToolCall": "Azure.AI.Agents.RunStepDeltaFileSearchToolCall",
Expand Down
2 changes: 1 addition & 1 deletion sdk/ai/azure-ai-agents/assets.json
Original file line number Diff line number Diff line change
Expand Up @@ -2,5 +2,5 @@
"AssetsRepo": "Azure/azure-sdk-assets",
"AssetsRepoPrefixPath": "python",
"TagPrefix": "python/ai/azure-ai-agents",
"Tag": "python/ai/azure-ai-agents_ad0998d8d9"
"Tag": "python/ai/azure-ai-agents_6e57db9f8e"
}
2 changes: 2 additions & 0 deletions sdk/ai/azure-ai-agents/azure/ai/agents/models/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -145,6 +145,7 @@
RunStepDeltaCodeInterpreterOutput,
RunStepDeltaCodeInterpreterToolCall,
RunStepDeltaConnectedAgentToolCall,
RunStepDeltaCustomBingGroundingToolCall,
RunStepDeltaDeepResearchToolCall,
RunStepDeltaDetail,
RunStepDeltaFileSearchToolCall,
Expand Down Expand Up @@ -382,6 +383,7 @@
"RunStepDeltaCodeInterpreterOutput",
"RunStepDeltaCodeInterpreterToolCall",
"RunStepDeltaConnectedAgentToolCall",
"RunStepDeltaCustomBingGroundingToolCall",
"RunStepDeltaDeepResearchToolCall",
"RunStepDeltaDetail",
"RunStepDeltaFileSearchToolCall",
Expand Down
65 changes: 55 additions & 10 deletions sdk/ai/azure-ai-agents/azure/ai/agents/models/_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -4425,15 +4425,16 @@ class RunStepBingCustomSearchToolCall(RunStepToolCall, discriminator="bing_custo
:ivar type: The object type, which is always 'bing_custom_search'. Required. Default value is
"bing_custom_search".
:vartype type: str
:ivar bing_custom_search: Reserved for future use. Required.
:ivar bing_custom_search: The dictionary with request and response from Custom Bing Grounding
search tool. Required.
:vartype bing_custom_search: dict[str, str]
"""

type: Literal["bing_custom_search"] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
"""The object type, which is always 'bing_custom_search'. Required. Default value is
\"bing_custom_search\"."""
bing_custom_search: Dict[str, str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
"""Reserved for future use. Required."""
"""The dictionary with request and response from Custom Bing Grounding search tool. Required."""

@overload
def __init__(
Expand Down Expand Up @@ -4465,15 +4466,16 @@ class RunStepBingGroundingToolCall(RunStepToolCall, discriminator="bing_groundin
:ivar type: The object type, which is always 'bing_grounding'. Required. Default value is
"bing_grounding".
:vartype type: str
:ivar bing_grounding: Reserved for future use. Required.
:ivar bing_grounding: The dictionary with request and response from Bing Grounding search tool.
Required.
:vartype bing_grounding: dict[str, str]
"""

type: Literal["bing_grounding"] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
"""The object type, which is always 'bing_grounding'. Required. Default value is
\"bing_grounding\"."""
bing_grounding: Dict[str, str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
"""Reserved for future use. Required."""
"""The dictionary with request and response from Bing Grounding search tool. Required."""

@overload
def __init__(
Expand Down Expand Up @@ -4985,10 +4987,11 @@ class RunStepDeltaToolCall(_Model):
call details.

You probably want to use the sub-classes and not this class directly. Known sub-classes are:
RunStepDeltaAzureAISearchToolCall, RunStepDeltaBingGroundingToolCall,
RunStepDeltaCodeInterpreterToolCall, RunStepDeltaConnectedAgentToolCall,
RunStepDeltaDeepResearchToolCall, RunStepDeltaFileSearchToolCall, RunStepDeltaFunctionToolCall,
RunStepDeltaMcpToolCall, RunStepDeltaOpenAPIToolCall
RunStepDeltaAzureAISearchToolCall, RunStepDeltaBingGroundingToolCall,
RunStepDeltaCodeInterpreterToolCall, RunStepDeltaConnectedAgentToolCall,
RunStepDeltaCustomBingGroundingToolCall, RunStepDeltaDeepResearchToolCall,
RunStepDeltaFileSearchToolCall, RunStepDeltaFunctionToolCall, RunStepDeltaMcpToolCall,
RunStepDeltaOpenAPIToolCall

:ivar index: The index of the tool call detail in the run step's tool_calls array. Required.
:vartype index: int
Expand Down Expand Up @@ -5078,15 +5081,16 @@ class RunStepDeltaBingGroundingToolCall(RunStepDeltaToolCall, discriminator="bin
:ivar type: The object type, which is always "bing_grounding". Required. Default value is
"bing_grounding".
:vartype type: str
:ivar bing_grounding: Reserved for future use. Required.
:ivar bing_grounding: The dictionary with request and response from Bing Grounding search tool.
Required.
:vartype bing_grounding: dict[str, str]
"""

type: Literal["bing_grounding"] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
"""The object type, which is always \"bing_grounding\". Required. Default value is
\"bing_grounding\"."""
bing_grounding: Dict[str, str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
"""Reserved for future use. Required."""
"""The dictionary with request and response from Bing Grounding search tool. Required."""

@overload
def __init__(
Expand Down Expand Up @@ -5418,6 +5422,47 @@ def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, type="connected_agent", **kwargs)


# NOTE(review): generated serialization model — the rest_field/rest_discriminator
# declarations encode the wire contract; keep any edits in sync with the code generator.
class RunStepDeltaCustomBingGroundingToolCall(RunStepDeltaToolCall, discriminator="bing_custom_search"):
    """Represents the Custom Bing Grounding tool call in a streaming run step.

    :ivar index: The index of the tool call detail in the run step's tool_calls array. Required.
    :vartype index: int
    :ivar id: The ID of the tool call, used when submitting outputs to the run. Required.
    :vartype id: str
    :ivar type: The object type, which is always 'bing_custom_search'. Required. Default value is
     "bing_custom_search".
    :vartype type: str
    :ivar bing_custom_search: The dictionary with request and response from Custom Bing Grounding
     search tool. Required.
    :vartype bing_custom_search: dict[str, str]
    """

    # Discriminator: this subclass is deserialized when the payload's "type" field
    # equals "bing_custom_search".
    type: Literal["bing_custom_search"] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The object type, which is always 'bing_custom_search'. Required. Default value is
     \"bing_custom_search\"."""
    # Flat string-to-string map carrying the tool's request/response details.
    # NOTE(review): exact key names (e.g. request URL vs. response metadata) are not
    # visible here — confirm against actual service payloads before relying on them.
    bing_custom_search: Dict[str, str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The dictionary with request and response from Custom Bing Grounding search tool. Required."""

    @overload
    def __init__(
        self,
        *,
        index: int,
        id: str,  # pylint: disable=redefined-builtin
        bing_custom_search: Dict[str, str],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Pin the discriminator so callers cannot construct this subclass with a
        # mismatched "type" value.
        super().__init__(*args, type="bing_custom_search", **kwargs)


class RunStepDeltaDeepResearchToolCall(RunStepDeltaToolCall, discriminator="deep_research"):
"""Represents the Deep research in a streaming run step.

Expand Down
6 changes: 4 additions & 2 deletions sdk/ai/azure-ai-agents/azure_ai_agents_tests.env
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@

########################################################################################################################
# Agents tests
#
#
AZURE_AI_AGENTS_TESTS_PROJECT_CONNECTION_STRING=
AZURE_AI_AGENTS_TESTS_PROJECT_ENDPOINT=
AZURE_AI_AGENTS_TESTS_DATA_PATH=
Expand All @@ -17,4 +17,6 @@ AZURE_AI_AGENTS_TESTS_SEARCH_CONNECTION_ID=
AZURE_AI_AGENTS_TESTS_IS_TEST_RUN=True
AZURE_AI_AGENTS_TESTS_BING_CONNECTION_ID=
AZURE_AI_AGENTS_TESTS_PLAYWRIGHT_CONNECTION_ID=
AZURE_AI_AGENTS_TESTS_DEEP_RESEARCH_MODEL=
AZURE_AI_AGENTS_TESTS_DEEP_RESEARCH_MODEL=
AZURE_AI_AGENTS_TESTS_BING_CUSTOM_CONNECTION_ID=
AZURE_AI_AGENTS_TESTS_BING_CONFIGURATION_NAME=
Original file line number Diff line number Diff line change
Expand Up @@ -21,11 +21,12 @@
page of your Azure AI Foundry portal.
2) MODEL_DEPLOYMENT_NAME - The deployment name of the AI model, as found under the "Name" column in
the "Models + endpoints" tab in your Azure AI Foundry project.
3) AZURE_BING_CONNECTION_ID - The connection id of the Bing connection, as found in the "Connected resources" tab
in your Azure AI Foundry project.
3) BING_CONNECTION_NAME - The name of a connection to the Bing search resource as it is
listed in Azure AI Foundry connected resources.
"""

import os
import re
from typing import Any
from azure.identity import DefaultAzureCredential
from azure.ai.projects import AIProjectClient
Expand All @@ -45,7 +46,9 @@
class MyEventHandler(AgentEventHandler):

def on_message_delta(self, delta: "MessageDeltaChunk") -> None:
print(f"Text delta received: {delta.text}")
# Do not print reference text as we will show actual citation instead.
if re.match(r"\u3010(.+)\u3011", delta.text) is None:
print(f"Text delta received: {delta.text}")
if delta.delta.content and isinstance(delta.delta.content[0], MessageDeltaTextContent):
delta_text_content = delta.delta.content[0]
if delta_text_content.text and delta_text_content.text.annotations:
Expand Down Expand Up @@ -85,7 +88,7 @@ def on_unhandled_event(self, event_type: str, event_data: Any) -> None:
with project_client:
agents_client = project_client.agents

bing_connection_id = os.environ["AZURE_BING_CONNECTION_ID"]
bing_connection_id = project_client.connections.get(os.environ["BING_CONNECTION_NAME"]).id
print(f"Bing Connection ID: {bing_connection_id}")

# Initialize agent bing tool and add the connection id
Expand Down Expand Up @@ -117,7 +120,12 @@ def on_unhandled_event(self, event_type: str, event_data: Any) -> None:

response_message = agents_client.messages.get_last_message_by_role(thread_id=thread.id, role=MessageRole.AGENT)
if response_message:
responses = []
for text_message in response_message.text_messages:
print(f"Agent response: {text_message.text.value}")
responses.append(text_message.text.value)
message = " ".join(responses)
for annotation in response_message.url_citation_annotations:
print(f"URL Citation: [{annotation.url_citation.title}]({annotation.url_citation.url})")
message = message.replace(
annotation.text, f" [{annotation.url_citation.title}]({annotation.url_citation.url})"
)
print(f"Agent response: {message}")
Original file line number Diff line number Diff line change
Expand Up @@ -21,11 +21,12 @@
page of your Azure AI Foundry portal.
2) MODEL_DEPLOYMENT_NAME - The deployment name of the AI model, as found under the "Name" column in
the "Models + endpoints" tab in your Azure AI Foundry project.
3) AZURE_BING_CONNECTION_ID - The ID of the Bing connection, as found in the "Connected resources" tab
in your Azure AI Foundry project.
3) BING_CONNECTION_NAME - The name of a connection to the Bing search resource as it is
listed in Azure AI Foundry connected resources.
"""

import os
import re
from azure.ai.projects import AIProjectClient
from azure.ai.agents.models import AgentStreamEvent, RunStepDeltaChunk
from azure.ai.agents.models import (
Expand All @@ -48,7 +49,7 @@
with project_client:
agents_client = project_client.agents

bing_connection_id = os.environ["AZURE_BING_CONNECTION_ID"]
bing_connection_id = project_client.connections.get(os.environ["BING_CONNECTION_NAME"]).id
bing = BingGroundingTool(connection_id=bing_connection_id)
print(f"Bing Connection ID: {bing_connection_id}")

Expand All @@ -69,12 +70,15 @@
print(f"Created message, message ID {message.id}")

# Process Agent run and stream events back to the client. It may take a few minutes for the agent to complete the run.
reference_text = re.compile(r"\u3010(.+)\u3011")
with agents_client.runs.stream(thread_id=thread.id, agent_id=agent.id) as stream:

for event_type, event_data, _ in stream:

if isinstance(event_data, MessageDeltaChunk):
print(f"Text delta received: {event_data.text}")
# Do not print reference text as we will show actual citation instead.
if reference_text.match(event_data.text) is None:
print(f"Text delta received: {event_data.text}")
if event_data.delta.content and isinstance(event_data.delta.content[0], MessageDeltaTextContent):
delta_text_content = event_data.delta.content[0]
if delta_text_content.text and delta_text_content.text.annotations:
Expand Down Expand Up @@ -113,7 +117,12 @@

response_message = agents_client.messages.get_last_message_by_role(thread_id=thread.id, role=MessageRole.AGENT)
if response_message:
responses = []
for text_message in response_message.text_messages:
print(f"Agent response: {text_message.text.value}")
responses.append(text_message.text.value)
message = " ".join(responses)
for annotation in response_message.url_citation_annotations:
print(f"URL Citation: [{annotation.url_citation.title}]({annotation.url_citation.url})")
message = message.replace(
annotation.text, f" [{annotation.url_citation.title}]({annotation.url_citation.url})"
)
print(f"Agent response: {message}")
Original file line number Diff line number Diff line change
Expand Up @@ -93,9 +93,7 @@
# Process Agent run and stream events back to the client. It may take a few minutes for the agent to complete the run.
mcp_tool.update_headers("SuperSecret", "123456")
# mcp_tool.set_approval_mode("never") # Uncomment to disable approval requirement
with agents_client.runs.stream(
thread_id=thread.id, agent_id=agent.id, tool_resources=mcp_tool.resources
) as stream:
with agents_client.runs.stream(thread_id=thread.id, agent_id=agent.id, tool_resources=mcp_tool.resources) as stream:

for event_type, event_data, _ in stream:

Expand Down Expand Up @@ -201,9 +199,7 @@
agents_client.delete_agent(agent.id)
print("Deleted agent")

response_message = agents_client.messages.get_last_message_by_role(
thread_id=thread.id, role=MessageRole.AGENT
)
response_message = agents_client.messages.get_last_message_by_role(thread_id=thread.id, role=MessageRole.AGENT)
if response_message:
for text_message in response_message.text_messages:
print(f"Agent response: {text_message.text.value}")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,24 +23,26 @@
page of your Azure AI Foundry portal.
2) MODEL_DEPLOYMENT_NAME - The deployment name of the AI model, as found under the "Name" column in
the "Models + endpoints" tab in your Azure AI Foundry project.
3) BING_CUSTOM_CONNECTION_ID - The ID of the Bing Custom Search connection, in the format of:
/subscriptions/{subscription-id}/resourceGroups/{resource-group-name}/providers/Microsoft.MachineLearningServices/workspaces/{workspace-name}/connections/{connection-name}
3) BING_CUSTOM_CONNECTION_NAME - The name of a connection to the Bing Custom Search resource as it is
   listed in Azure AI Foundry connected resources.
4) BING_CONFIGURATION_NAME - the name of a search configuration in Grounding with Bing Custom Search
resource.
"""

import os
from azure.ai.projects import AIProjectClient
from azure.identity import DefaultAzureCredential
from azure.ai.agents.models import BingCustomSearchTool
from azure.ai.agents.models import BingCustomSearchTool, ListSortOrder

project_client = AIProjectClient(
endpoint=os.environ["PROJECT_ENDPOINT"],
credential=DefaultAzureCredential(),
)

conn_id = os.environ["BING_CUSTOM_CONNECTION_ID"]
conn_id = project_client.connections.get(os.environ["BING_CUSTOM_CONNECTION_NAME"]).id

# Initialize Bing Custom Search tool with connection id and instance name
bing_custom_tool = BingCustomSearchTool(connection_id=conn_id, instance_name="<config_instance_name>")
bing_custom_tool = BingCustomSearchTool(connection_id=conn_id, instance_name=os.environ["BING_CONFIGURATION_NAME"])

# Create Agent with the Bing Custom Search tool and process Agent run
with project_client:
Expand Down Expand Up @@ -78,10 +80,15 @@
print("Deleted agent")

# Fetch and log all messages
messages = agents_client.messages.list(thread_id=thread.id)
messages = agents_client.messages.list(thread_id=thread.id, order=ListSortOrder.ASCENDING)
for msg in messages:
if msg.text_messages:
responses = []
for text_message in msg.text_messages:
print(f"Agent response: {text_message.text.value}")
responses.append(text_message.text.value)
message = " ".join(responses)
for annotation in msg.url_citation_annotations:
print(f"URL Citation: [{annotation.url_citation.title}]({annotation.url_citation.url})")
message = message.replace(
annotation.text, f" [{annotation.url_citation.title}]({annotation.url_citation.url})"
)
print(f"{msg.role}: {message}")
Loading