From 1bca7310cffdbb5b1d74cf56648a2f7852b146f8 Mon Sep 17 00:00:00 2001
From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com>
Date: Thu, 26 Feb 2026 15:47:52 +0000
Subject: [PATCH 1/6] Add 3 new hosted agent samples: AgentWithTools,
AgentWithLocalTools, AgentThreadAndHITL
- AgentWithTools: Foundry tools (MCP + code interpreter) via UseFoundryTools
- AgentWithLocalTools: Local C# function tool (Seattle hotel search) with AIProjectClient
- AgentThreadAndHITL: Human-in-the-loop with ApprovalRequiredAIFunction and thread persistence
All samples follow agent-framework conventions (net10.0, AzureCliCredential, CPM disabled).
AgentWithTools includes comprehensive README with setup guide and troubleshooting.
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
---
.../AgentThreadAndHITL.csproj | 70 ++++++++++
.../AgentThreadAndHITL/Dockerfile | 20 +++
.../AgentThreadAndHITL/Program.cs | 38 ++++++
.../HostedAgents/AgentThreadAndHITL/README.md | 44 ++++++
.../AgentThreadAndHITL/agent.yaml | 28 ++++
.../AgentThreadAndHITL/test_requests.py | 92 +++++++++++++
.../AgentWithLocalTools/.dockerignore | 24 ++++
.../AgentWithLocalTools.csproj | 70 ++++++++++
.../AgentWithLocalTools/Dockerfile | 20 +++
.../AgentWithLocalTools/Program.cs | 129 ++++++++++++++++++
.../AgentWithLocalTools/README.md | 37 +++++
.../AgentWithLocalTools/agent.yaml | 29 ++++
.../AgentWithLocalTools/run-requests.http | 52 +++++++
.../AgentWithTools/AgentWithTools.csproj | 69 ++++++++++
.../HostedAgents/AgentWithTools/Dockerfile | 20 +++
.../HostedAgents/AgentWithTools/Program.cs | 43 ++++++
.../HostedAgents/AgentWithTools/README.md | 125 +++++++++++++++++
.../HostedAgents/AgentWithTools/agent.yaml | 31 +++++
.../AgentWithTools/run-requests.http | 30 ++++
19 files changed, 971 insertions(+)
create mode 100644 dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/AgentThreadAndHITL.csproj
create mode 100644 dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/Dockerfile
create mode 100644 dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/Program.cs
create mode 100644 dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/README.md
create mode 100644 dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/agent.yaml
create mode 100644 dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/test_requests.py
create mode 100644 dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/.dockerignore
create mode 100644 dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/AgentWithLocalTools.csproj
create mode 100644 dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/Dockerfile
create mode 100644 dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/Program.cs
create mode 100644 dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/README.md
create mode 100644 dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/agent.yaml
create mode 100644 dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/run-requests.http
create mode 100644 dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/AgentWithTools.csproj
create mode 100644 dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/Dockerfile
create mode 100644 dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/Program.cs
create mode 100644 dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/README.md
create mode 100644 dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/agent.yaml
create mode 100644 dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/run-requests.http
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/AgentThreadAndHITL.csproj b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/AgentThreadAndHITL.csproj
new file mode 100644
index 0000000000..1dee424664
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/AgentThreadAndHITL.csproj
@@ -0,0 +1,70 @@
+
+
+
+ Exe
+ net10.0
+
+ enable
+ enable
+ $(NoWarn);MEAI001
+
+
+ false
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+
+
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/Dockerfile b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/Dockerfile
new file mode 100644
index 0000000000..004bd49fa8
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/Dockerfile
@@ -0,0 +1,20 @@
+# Build the application
+FROM mcr.microsoft.com/dotnet/sdk:10.0-alpine AS build
+WORKDIR /src
+
+# Copy files from the current directory on the host to the working directory in the container
+COPY . .
+
+RUN dotnet restore
+RUN dotnet build -c Release --no-restore
+RUN dotnet publish -c Release --no-build -o /app -f net10.0
+
+# Run the application
+FROM mcr.microsoft.com/dotnet/aspnet:10.0-alpine AS final
+WORKDIR /app
+
+# Copy everything needed to run the app from the "build" stage.
+COPY --from=build /app .
+
+EXPOSE 8088
+ENTRYPOINT ["dotnet", "AgentThreadAndHITL.dll"]
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/Program.cs b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/Program.cs
new file mode 100644
index 0000000000..305b9835ed
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/Program.cs
@@ -0,0 +1,38 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+// This sample demonstrates Human-in-the-Loop (HITL) capabilities with thread persistence.
+// The agent wraps function tools with ApprovalRequiredAIFunction to require user approval
+// before invoking them. Users respond with 'approve' or 'reject' when prompted.
+
+using System.ComponentModel;
+using Azure.AI.AgentServer.AgentFramework.Extensions;
+using Azure.AI.AgentServer.AgentFramework.Persistence;
+using Azure.AI.OpenAI;
+using Azure.Identity;
+using Microsoft.Agents.AI;
+using Microsoft.Extensions.AI;
+
+var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
+var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+
+[Description("Get the weather for a given location.")]
+static string GetWeather([Description("The location to get the weather for.")] string location)
+ => $"The weather in {location} is cloudy with a high of 15°C.";
+
+// Create the chat client and agent.
+// Note: ApprovalRequiredAIFunction wraps the tool to require user approval before invocation.
+// User should reply with 'approve' or 'reject' when prompted.
+#pragma warning disable MEAI001 // Type is for evaluation purposes only
+AIAgent agent = new AzureOpenAIClient(
+ new Uri(endpoint),
+ new AzureCliCredential())
+ .GetChatClient(deploymentName)
+ .AsIChatClient()
+ .CreateAIAgent(
+ instructions: "You are a helpful assistant",
+ tools: [new ApprovalRequiredAIFunction(AIFunctionFactory.Create(GetWeather))]
+ );
+#pragma warning restore MEAI001
+
+var threadRepository = new InMemoryAgentThreadRepository(agent);
+await agent.RunAIAgentAsync(telemetrySourceName: "Agents", threadRepository: threadRepository);
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/README.md b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/README.md
new file mode 100644
index 0000000000..754142b7bb
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/README.md
@@ -0,0 +1,44 @@
+# What this sample demonstrates
+
+This sample demonstrates Human-in-the-Loop (HITL) capabilities with thread persistence. The agent wraps function tools with `ApprovalRequiredAIFunction` so that every tool invocation requires explicit user approval before execution. Thread state is maintained across requests using `InMemoryAgentThreadRepository`.
+
+Key features:
+- Requiring human approval before executing function calls
+- Persisting conversation threads across multiple requests
+- Approving or rejecting tool invocations at runtime
+
+## Prerequisites
+
+Before running this sample, ensure you have:
+
+1. .NET 10 SDK installed
+2. An Azure OpenAI endpoint configured
+3. A deployment of a chat model (e.g., gpt-4o-mini)
+4. Azure CLI installed and authenticated (`az login`)
+
+## Environment Variables
+
+Set the following environment variables:
+
+```powershell
+# Replace with your Azure OpenAI endpoint
+$env:AZURE_OPENAI_ENDPOINT="https://your-openai-resource.openai.azure.com/"
+
+# Optional, defaults to gpt-4o-mini
+$env:AZURE_OPENAI_DEPLOYMENT_NAME="gpt-4o-mini"
+```
+
+## How It Works
+
+The sample uses `ApprovalRequiredAIFunction` to wrap standard AI function tools. When the model decides to call a tool, the wrapper intercepts the invocation and returns a HITL approval request to the caller instead of executing the function immediately.
+
+1. The user sends a message (e.g., "What is the weather in Vancouver?")
+2. The model determines a function call is needed and selects the `GetWeather` tool
+3. `ApprovalRequiredAIFunction` intercepts the call and returns an approval request containing the function name and arguments
+4. The user responds with `approve` or `reject`
+5. If approved, the function executes and the model generates a response using the result
+6. If rejected, the model generates a response without the function result
+
+Thread persistence is handled by `InMemoryAgentThreadRepository`, which stores conversation history keyed by `conversation.id`. This means the HITL flow works across multiple HTTP requests as long as each request includes the same `conversation.id`.
+
+> **Note:** HITL requires a stable `conversation.id` in every request so the agent can correlate the approval response with the original function call.
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/agent.yaml b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/agent.yaml
new file mode 100644
index 0000000000..aa78734283
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/agent.yaml
@@ -0,0 +1,28 @@
+name: AgentThreadAndHITL
+displayName: "Weather Assistant Agent"
+description: >
+  A Weather Assistant Agent that provides weather information and forecasts. It
+  demonstrates how to use Azure AI AgentServer with Human-in-the-Loop (HITL)
+  capabilities to get human approval for function calls.
+metadata:
+ authors:
+ - Microsoft Agent Framework Team
+ tags:
+ - Azure AI AgentServer
+ - Microsoft Agent Framework
+ - Human-in-the-Loop
+template:
+ kind: hosted
+ name: AgentThreadAndHITL
+ protocols:
+ - protocol: responses
+ version: v1
+ environment_variables:
+ - name: AZURE_OPENAI_ENDPOINT
+ value: ${AZURE_OPENAI_ENDPOINT}
+ - name: AZURE_OPENAI_DEPLOYMENT_NAME
+ value: gpt-4o-mini
+resources:
+ - name: "gpt-4o-mini"
+ kind: model
+ id: gpt-4o-mini
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/test_requests.py b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/test_requests.py
new file mode 100644
index 0000000000..025be5e45a
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/test_requests.py
@@ -0,0 +1,92 @@
+import json
+import os
+import secrets
+import string
+
+import requests
+
+
+base_url = os.getenv("AGENTSERVER_URL", "http://localhost:8088").rstrip("/")
+url = base_url if base_url.endswith("/responses") else f"{base_url}/responses"
+stream = False
+
+
+alphanum = string.ascii_letters + string.digits
+
+
+def create_conversation_id():
+ # Match AgentServer expected format: conv_<18-char partition><32-char entropy>
+ return "conv_" + "".join(secrets.choice(alphanum) for _ in range(50))
+
+
+def extract_conversation_id(response_detail):
+ conversation = response_detail.get("conversation")
+ if isinstance(conversation, dict):
+ conversation_id = conversation.get("id")
+ if isinstance(conversation_id, str) and conversation_id:
+ return conversation_id
+ return None
+
+
+user_input = "What is the weather like in Vancouver?"
+conversation_id = create_conversation_id()
+payload = {
+ "agent": {"name": "local_agent", "type": "agent_reference"},
+ "tools": [],
+ "stream": stream,
+ "input": user_input,
+ "conversation": {"id": conversation_id},
+}
+
+call_id = None
+
+try:
+ response = requests.post(url, json=payload)
+ response.raise_for_status()
+
+ response_detail = response.json()
+ print(json.dumps(response_detail, indent=2))
+
+ returned_conversation_id = extract_conversation_id(response_detail)
+ if returned_conversation_id:
+ conversation_id = returned_conversation_id
+
+ output = response_detail.get("output", [])
+ if isinstance(output, list):
+ for item in output:
+ if item.get("type") == "function_call" and item.get("name") == "__hosted_agent_adapter_hitl__":
+ call_id = item.get("call_id")
+ break
+except Exception as e:
+ print(f"Error: {e}")
+
+print("\n\n")
+print(f"conversation_id: {conversation_id}")
+print(f"call_id: {call_id}")
+
+if not call_id:
+ print("Failed to parse hitl request info")
+else:
+ human_feedback = {
+ "call_id": call_id,
+ "output": "approve",
+ "type": "function_call_output",
+ }
+
+ feedback_payload = {
+ "agent": {"name": "local_agent", "type": "agent_reference"},
+ "tools": [],
+ "stream": stream,
+ "input": [human_feedback],
+ "conversation": {"id": conversation_id},
+ }
+
+ try:
+ print("\n\nsending feedback...")
+ print(json.dumps(feedback_payload, indent=2))
+ response = requests.post(url, json=feedback_payload)
+ response.raise_for_status()
+ print("\n\nagent response:")
+ print(json.dumps(response.json(), indent=2))
+ except Exception as e:
+ print(f"Error: {e}")
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/.dockerignore b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/.dockerignore
new file mode 100644
index 0000000000..2afa2c2601
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/.dockerignore
@@ -0,0 +1,24 @@
+**/.dockerignore
+**/.env
+**/.git
+**/.gitignore
+**/.project
+**/.settings
+**/.toolstarget
+**/.vs
+**/.vscode
+**/*.*proj.user
+**/*.dbmdl
+**/*.jfm
+**/azds.yaml
+**/bin
+**/charts
+**/docker-compose*
+**/Dockerfile*
+**/node_modules
+**/npm-debug.log
+**/obj
+**/secrets.dev.yaml
+**/values.dev.yaml
+LICENSE
+README.md
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/AgentWithLocalTools.csproj b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/AgentWithLocalTools.csproj
new file mode 100644
index 0000000000..ee737683ac
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/AgentWithLocalTools.csproj
@@ -0,0 +1,70 @@
+
+
+
+ Exe
+ net10.0
+
+ enable
+ enable
+ true
+
+
+ false
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+
+
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/Dockerfile b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/Dockerfile
new file mode 100644
index 0000000000..c2461965a4
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/Dockerfile
@@ -0,0 +1,20 @@
+# Build the application
+FROM mcr.microsoft.com/dotnet/sdk:10.0-alpine AS build
+WORKDIR /src
+
+# Copy files from the current directory on the host to the working directory in the container
+COPY . .
+
+RUN dotnet restore
+RUN dotnet build -c Release --no-restore
+RUN dotnet publish -c Release --no-build -o /app -f net10.0
+
+# Run the application
+FROM mcr.microsoft.com/dotnet/aspnet:10.0-alpine AS final
+WORKDIR /app
+
+# Copy everything needed to run the app from the "build" stage.
+COPY --from=build /app .
+
+EXPOSE 8088
+ENTRYPOINT ["dotnet", "AgentWithLocalTools.dll"]
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/Program.cs b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/Program.cs
new file mode 100644
index 0000000000..35a875eb8d
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/Program.cs
@@ -0,0 +1,129 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+// Seattle Hotel Agent - A simple agent with a tool to find hotels in Seattle.
+// Uses Microsoft Agent Framework with Azure AI Foundry.
+// Ready for deployment to Foundry Hosted Agent service.
+
+using System.ComponentModel;
+using System.Globalization;
+using System.Text;
+using System.ClientModel.Primitives;
+using Azure.AI.AgentServer.AgentFramework.Extensions;
+using Azure.AI.OpenAI;
+using Azure.AI.Projects;
+using Azure.Identity;
+using Microsoft.Agents.AI;
+using Microsoft.Extensions.AI;
+
+var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT")
+ ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
+var deploymentName = Environment.GetEnvironmentVariable("MODEL_DEPLOYMENT_NAME") ?? "gpt-4.1-mini";
+Console.WriteLine($"Project Endpoint: {endpoint}");
+Console.WriteLine($"Model Deployment: {deploymentName}");
+
+var seattleHotels = new[]
+{
+ new Hotel("Contoso Suites", 189, 4.5, "Downtown"),
+ new Hotel("Fabrikam Residences", 159, 4.2, "Pike Place Market"),
+ new Hotel("Alpine Ski House", 249, 4.7, "Seattle Center"),
+ new Hotel("Margie's Travel Lodge", 219, 4.4, "Waterfront"),
+ new Hotel("Northwind Inn", 139, 4.0, "Capitol Hill"),
+ new Hotel("Relecloud Hotel", 99, 3.8, "University District"),
+};
+
+[Description("Get available hotels in Seattle for the specified dates. This simulates a call to a hotel availability API.")]
+string GetAvailableHotels(
+ [Description("Check-in date in YYYY-MM-DD format")] string checkInDate,
+ [Description("Check-out date in YYYY-MM-DD format")] string checkOutDate,
+ [Description("Maximum price per night in USD (optional, defaults to 500)")] int maxPrice = 500)
+{
+ try
+ {
+ if (!DateTime.TryParseExact(checkInDate, "yyyy-MM-dd", CultureInfo.InvariantCulture, DateTimeStyles.None, out var checkIn))
+ {
+ return "Error parsing check-in date. Please use YYYY-MM-DD format.";
+ }
+
+ if (!DateTime.TryParseExact(checkOutDate, "yyyy-MM-dd", CultureInfo.InvariantCulture, DateTimeStyles.None, out var checkOut))
+ {
+ return "Error parsing check-out date. Please use YYYY-MM-DD format.";
+ }
+
+ if (checkOut <= checkIn)
+ {
+ return "Error: Check-out date must be after check-in date.";
+ }
+
+ var nights = (checkOut - checkIn).Days;
+ var availableHotels = seattleHotels.Where(h => h.PricePerNight <= maxPrice).ToList();
+
+ if (availableHotels.Count == 0)
+ {
+ return $"No hotels found in Seattle within your budget of ${maxPrice}/night.";
+ }
+
+ var result = new StringBuilder();
+ result.AppendLine($"Available hotels in Seattle from {checkInDate} to {checkOutDate} ({nights} nights):");
+ result.AppendLine();
+
+ foreach (var hotel in availableHotels)
+ {
+ var totalCost = hotel.PricePerNight * nights;
+ result.AppendLine($"**{hotel.Name}**");
+ result.AppendLine($" Location: {hotel.Location}");
+ result.AppendLine($" Rating: {hotel.Rating}/5");
+ result.AppendLine($" ${hotel.PricePerNight}/night (Total: ${totalCost})");
+ result.AppendLine();
+ }
+
+ return result.ToString();
+ }
+ catch (Exception ex)
+ {
+ return $"Error processing request. Details: {ex.Message}";
+ }
+}
+
+var credential = new AzureCliCredential();
+AIProjectClient projectClient = new AIProjectClient(new Uri(endpoint), credential);
+
+ClientConnection connection = projectClient.GetConnection(typeof(AzureOpenAIClient).FullName!);
+
+if (!connection.TryGetLocatorAsUri(out Uri? openAiEndpoint) || openAiEndpoint is null)
+{
+ throw new InvalidOperationException("Failed to get OpenAI endpoint from project connection.");
+}
+openAiEndpoint = new Uri($"https://{openAiEndpoint.Host}");
+Console.WriteLine($"OpenAI Endpoint: {openAiEndpoint}");
+
+var chatClient = new AzureOpenAIClient(openAiEndpoint, credential)
+ .GetChatClient(deploymentName)
+ .AsIChatClient()
+ .AsBuilder()
+ .UseOpenTelemetry(sourceName: "Agents", configure: cfg => cfg.EnableSensitiveData = false)
+ .Build();
+
+var agent = new ChatClientAgent(chatClient,
+ name: "SeattleHotelAgent",
+ instructions: """
+ You are a helpful travel assistant specializing in finding hotels in Seattle, Washington.
+
+ When a user asks about hotels in Seattle:
+ 1. Ask for their check-in and check-out dates if not provided
+ 2. Ask about their budget preferences if not mentioned
+ 3. Use the GetAvailableHotels tool to find available options
+ 4. Present the results in a friendly, informative way
+ 5. Offer to help with additional questions about the hotels or Seattle
+
+ Be conversational and helpful. If users ask about things outside of Seattle hotels,
+ politely let them know you specialize in Seattle hotel recommendations.
+ """,
+ tools: [AIFunctionFactory.Create(GetAvailableHotels)])
+ .AsBuilder()
+ .UseOpenTelemetry(sourceName: "Agents", configure: cfg => cfg.EnableSensitiveData = false)
+ .Build();
+
+Console.WriteLine("Seattle Hotel Agent Server running on http://localhost:8088");
+await agent.RunAIAgentAsync(telemetrySourceName: "Agents");
+
+sealed record Hotel(string Name, int PricePerNight, double Rating, string Location);
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/README.md b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/README.md
new file mode 100644
index 0000000000..6d791ce7b7
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/README.md
@@ -0,0 +1,37 @@
+# What this sample demonstrates
+
+This sample demonstrates how to build a hosted agent that uses local C# function tools — a key advantage of code-based hosted agents over prompt agents. The agent acts as a Seattle travel assistant with a `GetAvailableHotels` tool that simulates querying a hotel availability API.
+
+Key features:
+- Defining local C# functions as agent tools using `AIFunctionFactory`
+- Using `AIProjectClient` to discover the OpenAI connection from the Azure AI Foundry project
+- Building a `ChatClientAgent` with custom instructions and tools
+- Deploying to the Foundry Hosted Agent service
+
+## Prerequisites
+
+Before running this sample, ensure you have:
+
+1. .NET 10 SDK installed
+2. An Azure AI Foundry Project with a chat model deployed (e.g., gpt-4.1-mini)
+3. Azure CLI installed and authenticated (`az login`)
+
+## Environment Variables
+
+Set the following environment variables:
+
+```powershell
+# Replace with your Azure AI Foundry project endpoint
+$env:AZURE_AI_PROJECT_ENDPOINT="https://your-project.services.ai.azure.com/api/projects/your-project-name"
+
+# Optional, defaults to gpt-4.1-mini
+$env:MODEL_DEPLOYMENT_NAME="gpt-4.1-mini"
+```
+
+## How It Works
+
+1. The agent uses `AIProjectClient` to discover the Azure OpenAI connection from the project endpoint
+2. A local C# function `GetAvailableHotels` is registered as a tool using `AIFunctionFactory.Create`
+3. When users ask about hotels, the model invokes the local tool to search simulated hotel data
+4. The tool filters hotels by price and calculates total costs based on the requested dates
+5. Results are returned to the model, which presents them in a conversational format
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/agent.yaml b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/agent.yaml
new file mode 100644
index 0000000000..3f51fc842f
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/agent.yaml
@@ -0,0 +1,29 @@
+name: seattle-hotel-agent
+description: >
+ A travel assistant agent that helps users find hotels in Seattle.
+ Demonstrates local C# tool execution - a key advantage of code-based
+ hosted agents over prompt agents.
+metadata:
+ authors:
+ - Microsoft
+ tags:
+ - Azure AI AgentServer
+ - Microsoft Agent Framework
+ - Local Tools
+ - Travel Assistant
+ - Hotel Search
+template:
+ name: seattle-hotel-agent
+ kind: hosted
+ protocols:
+ - protocol: responses
+ version: v1
+ environment_variables:
+ - name: AZURE_AI_PROJECT_ENDPOINT
+ value: ${AZURE_AI_PROJECT_ENDPOINT}
+ - name: MODEL_DEPLOYMENT_NAME
+ value: gpt-4.1-mini
+resources:
+ - kind: model
+ id: gpt-4.1-mini
+ name: chat
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/run-requests.http b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/run-requests.http
new file mode 100644
index 0000000000..4f2e87e097
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/run-requests.http
@@ -0,0 +1,52 @@
+@host = http://localhost:8088
+@endpoint = {{host}}/responses
+
+### Health Check
+GET {{host}}/readiness
+
+### Simple hotel search - budget under $200
+POST {{endpoint}}
+Content-Type: application/json
+
+{
+ "input": "I need a hotel in Seattle from 2025-03-15 to 2025-03-18, budget under $200 per night",
+ "stream": false
+}
+
+### Hotel search with higher budget
+POST {{endpoint}}
+Content-Type: application/json
+
+{
+ "input": "Find me hotels in Seattle for March 20-23, 2025 under $250 per night",
+ "stream": false
+}
+
+### Ask for recommendations without dates (agent should ask for clarification)
+POST {{endpoint}}
+Content-Type: application/json
+
+{
+ "input": "What hotels do you recommend in Seattle?",
+ "stream": false
+}
+
+### Explicit input format
+POST {{endpoint}}
+Content-Type: application/json
+
+{
+ "input": [
+ {
+ "type": "message",
+ "role": "user",
+ "content": [
+ {
+ "type": "input_text",
+ "text": "I'm looking for a hotel in Seattle from 2025-04-01 to 2025-04-05, my budget is $150 per night maximum"
+ }
+ ]
+ }
+ ],
+ "stream": false
+}
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/AgentWithTools.csproj b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/AgentWithTools.csproj
new file mode 100644
index 0000000000..e1e30873e5
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/AgentWithTools.csproj
@@ -0,0 +1,69 @@
+
+
+
+ Exe
+ net10.0
+
+ enable
+ enable
+
+
+ false
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+
+
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/Dockerfile b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/Dockerfile
new file mode 100644
index 0000000000..c9f39f9574
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/Dockerfile
@@ -0,0 +1,20 @@
+# Build the application
+FROM mcr.microsoft.com/dotnet/sdk:10.0-alpine AS build
+WORKDIR /src
+
+# Copy files from the current directory on the host to the working directory in the container
+COPY . .
+
+RUN dotnet restore
+RUN dotnet build -c Release --no-restore
+RUN dotnet publish -c Release --no-build -o /app -f net10.0
+
+# Run the application
+FROM mcr.microsoft.com/dotnet/aspnet:10.0-alpine AS final
+WORKDIR /app
+
+# Copy everything needed to run the app from the "build" stage.
+COPY --from=build /app .
+
+EXPOSE 8088
+ENTRYPOINT ["dotnet", "AgentWithTools.dll"]
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/Program.cs b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/Program.cs
new file mode 100644
index 0000000000..59892eb670
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/Program.cs
@@ -0,0 +1,43 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+// This sample demonstrates how to use Foundry tools (MCP and code interpreter)
+// with an AI agent hosted using the Azure AI AgentServer SDK.
+
+using Azure.AI.AgentServer.AgentFramework.Extensions;
+using Microsoft.Agents.AI;
+using Microsoft.Extensions.AI;
+using Azure.AI.OpenAI;
+using Azure.Identity;
+
+var openAiEndpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
+var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+var toolConnectionId = Environment.GetEnvironmentVariable("MCP_TOOL_CONNECTION_ID") ?? throw new InvalidOperationException("MCP_TOOL_CONNECTION_ID is not set.");
+
+var credential = new AzureCliCredential();
+
+var chatClient = new AzureOpenAIClient(new Uri(openAiEndpoint), credential)
+ .GetChatClient(deploymentName)
+ .AsIChatClient()
+ .AsBuilder()
+ .UseFoundryTools(new { type = "mcp", project_connection_id = toolConnectionId }, new { type = "code_interpreter" })
+ .UseOpenTelemetry(sourceName: "Agents", configure: (cfg) => cfg.EnableSensitiveData = true)
+ .Build();
+
+var agent = new ChatClientAgent(chatClient,
+ name: "AgentWithTools",
+ instructions: @"You are a helpful assistant with access to tools for fetching Microsoft documentation.
+
+ IMPORTANT: When the user asks about Microsoft Learn articles or documentation:
+ 1. You MUST use the microsoft_docs_fetch tool to retrieve the actual content
+ 2. Do NOT rely on your training data
+ 3. Always fetch the latest information from the provided URL
+
+ Available tools:
+ - microsoft_docs_fetch: Fetches and converts Microsoft Learn documentation
+ - microsoft_docs_search: Searches Microsoft/Azure documentation
+ - microsoft_code_sample_search: Searches for code examples")
+ .AsBuilder()
+ .UseOpenTelemetry(sourceName: "Agents", configure: (cfg) => cfg.EnableSensitiveData = true)
+ .Build();
+
+await agent.RunAIAgentAsync(telemetrySourceName: "Agents");
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/README.md b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/README.md
new file mode 100644
index 0000000000..85b89d5cce
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/README.md
@@ -0,0 +1,125 @@
+# What this sample demonstrates
+
+This sample demonstrates how to use Foundry tools with an AI agent via the `UseFoundryTools` extension. The agent is configured with two tool types: an MCP (Model Context Protocol) connection for fetching Microsoft Learn documentation and a code interpreter for running code when needed.
+
+Key features:
+
+- Configuring Foundry tools using `UseFoundryTools` with MCP and code interpreter
+- Connecting to an external MCP tool via a Foundry project connection
+- Using `AzureCliCredential` for Azure authentication
+- OpenTelemetry instrumentation for both the chat client and agent
+
+## Prerequisites
+
+Before running this sample, ensure you have:
+
+1. **.NET 10 SDK** or later installed
+2. **Azure CLI** installed and authenticated (`az login`)
+3. An **Azure AI Foundry project** with a chat model deployed (e.g., `gpt-5.2`, `gpt-4o-mini`)
+4. The **Azure AI Developer** role assigned on the Foundry resource (see [Role Assignment](#step-3-assign-the-azure-ai-developer-role) below)
+5. An **MCP tool connection** configured in your Foundry project (see [MCP Tool Setup](#step-2-create-the-mcp-tool-connection) below)
+
+## Setup Guide
+
+### Step 1: Authenticate with Azure CLI
+
+Make sure you're logged in with the account that has access to your Azure AI Foundry project:
+
+```powershell
+az login
+az account show # Verify the correct subscription is selected
+```
+
+### Step 2: Create the MCP Tool Connection
+
+The agent uses a Foundry MCP tool connection to access Microsoft Learn documentation tools. You need to create this connection in your Azure AI Foundry project.
+
+1. Go to the [Azure AI Foundry portal](https://ai.azure.com)
+2. Navigate to your project
+3. Go to **Connected resources** → **+ New connection** → **Model Context Protocol tool**
+4. Fill in the following:
+ - **Name**: `SampleMCPTool` (or any name you prefer)
+ - **Remote MCP Server endpoint**: `https://learn.microsoft.com/api/mcp`
+ - **Authentication**: `Unauthenticated`
+5. Click **Connect**
+
+The connection **name** you chose (e.g., `SampleMCPTool`) is the value you'll use for `MCP_TOOL_CONNECTION_ID`.
+
+### Step 3: Assign the Azure AI Developer Role
+
+The `UseFoundryTools` extension requires the `Microsoft.CognitiveServices/accounts/AIServices/agents/write` data action to resolve and invoke MCP tools. This is included in the **Azure AI Developer** role.
+
+Even if you created the Foundry project, you may not have this role by default. To assign it:
+
+```powershell
+# Replace with your user email and resource path
+az role assignment create `
+ --role "Azure AI Developer" `
+ --assignee "your-email@microsoft.com" `
+ --scope "/subscriptions/{subscription-id}/resourceGroups/{resource-group}/providers/Microsoft.CognitiveServices/accounts/{account-name}"
+```
+
+> **Note**: You need **Owner** or **User Access Administrator** permissions on the resource to assign roles. If you don't have this, you may need to request JIT (Just-In-Time) elevated access via [Azure PIM](https://portal.azure.com/#view/Microsoft_Azure_PIMCommon/ActivationMenuBlade/~/aadmigratedresource) first.
+
+For more details on permissions, see [Azure AI Foundry Permissions](https://aka.ms/FoundryPermissions).
+
+### Step 4: Set Environment Variables
+
+The sample requires `AZURE_OPENAI_ENDPOINT` and `AZURE_AI_PROJECT_ENDPOINT`. The `UseFoundryTools` extension internally uses `AZURE_AI_PROJECT_ENDPOINT` to resolve tool connections.
+
+```powershell
+# Your Azure OpenAI endpoint
+$env:AZURE_OPENAI_ENDPOINT="https://your-openai-resource.openai.azure.com/"
+
+# Your Azure AI Foundry project endpoint (required by UseFoundryTools)
+$env:AZURE_AI_PROJECT_ENDPOINT="https://your-resource.services.ai.azure.com/api/projects/your-project"
+
+# Chat model deployment name (defaults to gpt-4o-mini if not set)
+$env:AZURE_OPENAI_DEPLOYMENT_NAME="gpt-5.2"
+
+# The MCP tool connection name (just the name, not the full ARM resource ID)
+$env:MCP_TOOL_CONNECTION_ID="SampleMCPTool"
+```
+
+> **Important**: `MCP_TOOL_CONNECTION_ID` should be the connection **name** only (e.g., `SampleMCPTool`), not the full ARM resource path.
+
+## Running the Sample
+
+```powershell
+dotnet run
+```
+
+This starts the hosted agent locally on `http://localhost:8088/`.
+
+### Interacting with the Agent
+
+You can use the `run-requests.http` file in this directory, or send requests directly:
+
+```powershell
+$body = @{ input = "Search for Azure AI Agent Service documentation" } | ConvertTo-Json
+Invoke-RestMethod -Uri "http://localhost:8088/responses" -Method Post -Body $body -ContentType "application/json"
+```
+
+## How It Works
+
+1. An `AzureOpenAIClient` is created with `AzureCliCredential` and used to get a chat client
+2. The chat client is wrapped with `UseFoundryTools` which registers two Foundry tool types:
+ - **MCP connection**: Connects to an external MCP server (Microsoft Learn) via the project connection name, providing documentation fetch and search capabilities
+ - **Code interpreter**: Allows the agent to execute code snippets when needed
+3. `UseFoundryTools` resolves the connection using `AZURE_AI_PROJECT_ENDPOINT` internally
+4. A `ChatClientAgent` is created with instructions guiding it to use the MCP tools for documentation queries
+5. The agent is hosted using `RunAIAgentAsync` which exposes the OpenAI Responses-compatible API endpoint
+
+## Troubleshooting
+
+### `PermissionDenied` — lacks `agents/write` data action
+
+Assign the **Azure AI Developer** role to your user on the Cognitive Services resource. See [Step 3](#step-3-assign-the-azure-ai-developer-role).
+
+### `Project connection ... was not found`
+
+Make sure `MCP_TOOL_CONNECTION_ID` contains only the connection **name** (e.g., `SampleMCPTool`), not the full ARM resource ID path.
+
+### `AZURE_AI_PROJECT_ENDPOINT must be set`
+
+The `UseFoundryTools` extension requires `AZURE_AI_PROJECT_ENDPOINT` to be set, even though `Program.cs` reads `AZURE_OPENAI_ENDPOINT`. Both must be configured. See [Step 4](#step-4-set-environment-variables).
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/agent.yaml b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/agent.yaml
new file mode 100644
index 0000000000..5d2b1f8d8d
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/agent.yaml
@@ -0,0 +1,31 @@
+name: AgentWithTools
+displayName: "Agent with Tools"
+description: >
+ An AI agent that uses Foundry tools (MCP and code interpreter) with Azure OpenAI.
+ The agent can fetch Microsoft Learn documentation and run code when needed.
+metadata:
+ authors:
+ - Microsoft Agent Framework Team
+ tags:
+ - Azure AI AgentServer
+ - Microsoft Agent Framework
+ - Tools
+ - MCP
+ - Code Interpreter
+template:
+ kind: hosted
+ name: AgentWithTools
+ protocols:
+ - protocol: responses
+ version: v1
+ environment_variables:
+ - name: AZURE_OPENAI_ENDPOINT
+ value: ${AZURE_OPENAI_ENDPOINT}
+ - name: AZURE_OPENAI_DEPLOYMENT_NAME
+ value: gpt-4o-mini
+ - name: MCP_TOOL_CONNECTION_ID
+ value: ${MCP_TOOL_CONNECTION_ID}
+resources:
+ - name: "gpt-4o-mini"
+ kind: model
+ id: gpt-4o-mini
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/run-requests.http b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/run-requests.http
new file mode 100644
index 0000000000..22a37ff54e
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/run-requests.http
@@ -0,0 +1,30 @@
+@host = http://localhost:8088
+@endpoint = {{host}}/responses
+
+### Health Check
+GET {{host}}/readiness
+
+### Simple string input
+POST {{endpoint}}
+Content-Type: application/json
+
+{
+ "input": "Please use the microsoft_docs_fetch tool to fetch and summarize the Microsoft Learn article at https://learn.microsoft.com/azure/ai-services/openai/overview"
+}
+
+### Explicit input
+POST {{endpoint}}
+Content-Type: application/json
+
+{
+ "input": [
+ {
+ "type": "message",
+ "role": "user",
+ "content": [
+ {
+ "type": "input_text",
+ "text": "Please use the microsoft_docs_fetch tool to fetch and summarize the Microsoft Learn article at https://learn.microsoft.com/azure/ai-services/openai/overview"
+ }
+ ]
+ }
+ ]
+}
From fff6cea9549cd99db366eeb8950488b6e5beedbb Mon Sep 17 00:00:00 2001
From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com>
Date: Thu, 26 Feb 2026 15:57:56 +0000
Subject: [PATCH 2/6] Add root HostedAgents README, replace test_requests.py
with .http, update sample READMEs
- Create root README.md with shared prerequisites, Azure AI Foundry setup,
troubleshooting, and samples index
- Replace test_requests.py with run-requests.http in AgentThreadAndHITL
- Add pointer to root README in all 6 sample READMEs
- Trim AgentWithTools README to concise style
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
---
.../HostedAgents/AgentThreadAndHITL/README.md | 4 +-
.../AgentThreadAndHITL/run-requests.http | 70 ++++++++++
.../AgentThreadAndHITL/test_requests.py | 92 -------------
.../HostedAgents/AgentWithHostedMCP/README.md | 2 +
.../AgentWithLocalTools/README.md | 2 +
.../AgentWithTextSearchRag/README.md | 2 +
.../HostedAgents/AgentWithTools/README.md | 96 ++------------
.../HostedAgents/AgentsInWorkflows/README.md | 2 +
.../05-end-to-end/HostedAgents/README.md | 125 ++++++++++++++++++
9 files changed, 214 insertions(+), 181 deletions(-)
create mode 100644 dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/run-requests.http
delete mode 100644 dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/test_requests.py
create mode 100644 dotnet/samples/05-end-to-end/HostedAgents/README.md
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/README.md b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/README.md
index 754142b7bb..f2d9a65103 100644
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/README.md
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/README.md
@@ -7,6 +7,8 @@ Key features:
- Persisting conversation threads across multiple requests
- Approving or rejecting tool invocations at runtime
+> For common prerequisites and setup instructions, see the [Hosted Agent Samples README](../README.md).
+
## Prerequisites
Before running this sample, ensure you have:
@@ -41,4 +43,4 @@ The sample uses `ApprovalRequiredAIFunction` to wrap standard AI function tools.
Thread persistence is handled by `InMemoryAgentThreadRepository`, which stores conversation history keyed by `conversation.id`. This means the HITL flow works across multiple HTTP requests as long as each request includes the same `conversation.id`.
-> **Note:** HITL requires a stable `conversation.id` in every request so the agent can correlate the approval response with the original function call.
+> **Note:** HITL requires a stable `conversation.id` in every request so the agent can correlate the approval response with the original function call. Use the `run-requests.http` file in this directory to test the full approval flow.
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/run-requests.http b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/run-requests.http
new file mode 100644
index 0000000000..196a30a542
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/run-requests.http
@@ -0,0 +1,70 @@
+@host = http://localhost:8088
+@endpoint = {{host}}/responses
+
+### Health Check
+GET {{host}}/readiness
+
+###
+# HITL (Human-in-the-Loop) Flow
+#
+# This sample requires a multi-turn conversation to demonstrate the approval flow:
+# 1. Send a request that triggers a tool call (e.g., asking about the weather)
+# 2. The agent responds with a function_call named "__hosted_agent_adapter_hitl__"
+# containing the call_id and the tool details
+# 3. Send a follow-up request with a function_call_output to approve or reject
+#
+# IMPORTANT: You must use the same conversation.id across all requests in a flow,
+# and update the call_id from step 2 into step 3.
+###
+
+### Step 1: Send initial request (triggers HITL approval)
+# @name initialRequest
+POST {{endpoint}}
+Content-Type: application/json
+
+{
+ "input": "What is the weather like in Vancouver?",
+ "stream": false,
+ "conversation": {
+ "id": "conv_test0000000000000000000000000000000000000000000000"
+ }
+}
+
+### Step 2: Approve the function call
+# Copy the call_id from the Step 1 response output and replace below.
+# The response will contain: "name": "__hosted_agent_adapter_hitl__" with a "call_id" value.
+POST {{endpoint}}
+Content-Type: application/json
+
+{
+ "input": [
+ {
+ "type": "function_call_output",
+ "call_id": "REPLACE_WITH_CALL_ID_FROM_STEP_1",
+ "output": "approve"
+ }
+ ],
+ "stream": false,
+ "conversation": {
+ "id": "conv_test0000000000000000000000000000000000000000000000"
+ }
+}
+
+### Step 3 (alternative): Reject the function call
+# Use this instead of Step 2 to deny the tool execution.
+POST {{endpoint}}
+Content-Type: application/json
+
+{
+ "input": [
+ {
+ "type": "function_call_output",
+ "call_id": "REPLACE_WITH_CALL_ID_FROM_STEP_1",
+ "output": "reject"
+ }
+ ],
+ "stream": false,
+ "conversation": {
+ "id": "conv_test0000000000000000000000000000000000000000000000"
+ }
+}
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/test_requests.py b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/test_requests.py
deleted file mode 100644
index 025be5e45a..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/test_requests.py
+++ /dev/null
@@ -1,92 +0,0 @@
-import json
-import os
-import secrets
-import string
-
-import requests
-
-
-base_url = os.getenv("AGENTSERVER_URL", "http://localhost:8088").rstrip("/")
-url = base_url if base_url.endswith("/responses") else f"{base_url}/responses"
-stream = False
-
-
-alphanum = string.ascii_letters + string.digits
-
-
-def create_conversation_id():
- # Match AgentServer expected format: conv_<18-char partition><32-char entropy>
- return "conv_" + "".join(secrets.choice(alphanum) for _ in range(50))
-
-
-def extract_conversation_id(response_detail):
- conversation = response_detail.get("conversation")
- if isinstance(conversation, dict):
- conversation_id = conversation.get("id")
- if isinstance(conversation_id, str) and conversation_id:
- return conversation_id
- return None
-
-
-user_input = "What is the weather like in Vancouver?"
-conversation_id = create_conversation_id()
-payload = {
- "agent": {"name": "local_agent", "type": "agent_reference"},
- "tools": [],
- "stream": stream,
- "input": user_input,
- "conversation": {"id": conversation_id},
-}
-
-call_id = None
-
-try:
- response = requests.post(url, json=payload)
- response.raise_for_status()
-
- response_detail = response.json()
- print(json.dumps(response_detail, indent=2))
-
- returned_conversation_id = extract_conversation_id(response_detail)
- if returned_conversation_id:
- conversation_id = returned_conversation_id
-
- output = response_detail.get("output", [])
- if isinstance(output, list):
- for item in output:
- if item.get("type") == "function_call" and item.get("name") == "__hosted_agent_adapter_hitl__":
- call_id = item.get("call_id")
- break
-except Exception as e:
- print(f"Error: {e}")
-
-print("\n\n")
-print(f"conversation_id: {conversation_id}")
-print(f"call_id: {call_id}")
-
-if not call_id:
- print("Failed to parse hitl request info")
-else:
- human_feedback = {
- "call_id": call_id,
- "output": "approve",
- "type": "function_call_output",
- }
-
- feedback_payload = {
- "agent": {"name": "local_agent", "type": "agent_reference"},
- "tools": [],
- "stream": stream,
- "input": [human_feedback],
- "conversation": {"id": conversation_id},
- }
-
- try:
- print("\n\nsending feedback...")
- print(json.dumps(feedback_payload, indent=2))
- response = requests.post(url, json=feedback_payload)
- response.raise_for_status()
- print("\n\nagent response:")
- print(json.dumps(response.json(), indent=2))
- except Exception as e:
- print(f"Error: {e}")
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithHostedMCP/README.md b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithHostedMCP/README.md
index a5648d7ac9..8d8ddba330 100644
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithHostedMCP/README.md
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithHostedMCP/README.md
@@ -8,6 +8,8 @@ Key features:
- Filtering available tools from an MCP server
- Using Azure OpenAI Responses with MCP tools
+> For common prerequisites and setup instructions, see the [Hosted Agent Samples README](../README.md).
+
## Prerequisites
Before running this sample, ensure you have:
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/README.md b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/README.md
index 6d791ce7b7..ee504dc1f4 100644
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/README.md
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/README.md
@@ -8,6 +8,8 @@ Key features:
- Building a `ChatClientAgent` with custom instructions and tools
- Deploying to the Foundry Hosted Agent service
+> For common prerequisites and setup instructions, see the [Hosted Agent Samples README](../README.md).
+
## Prerequisites
Before running this sample, ensure you have:
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTextSearchRag/README.md b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTextSearchRag/README.md
index 614597bed9..396bc1bc9b 100644
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTextSearchRag/README.md
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTextSearchRag/README.md
@@ -8,6 +8,8 @@ Key features:
- Managing conversation memory with a rolling window approach
- Citing source documents in AI responses
+> For common prerequisites and setup instructions, see the [Hosted Agent Samples README](../README.md).
+
## Prerequisites
Before running this sample, ensure you have:
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/README.md b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/README.md
index 85b89d5cce..a456b24183 100644
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/README.md
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/README.md
@@ -9,68 +9,21 @@ Key features:
- Using `AzureCliCredential` for Azure authentication
- OpenTelemetry instrumentation for both the chat client and agent
-## Prerequisites
-
-Before running this sample, ensure you have:
-
-1. **.NET 10 SDK** or later installed
-2. **Azure CLI** installed and authenticated (`az login`)
-3. An **Azure AI Foundry project** with a chat model deployed (e.g., `gpt-5.2`, `gpt-4o-mini`)
-4. The **Azure AI Developer** role assigned on the Foundry resource (see [Role Assignment](#step-3-assign-the-azure-ai-developer-role) below)
-5. An **MCP tool connection** configured in your Foundry project (see [MCP Tool Setup](#step-2-create-the-mcp-tool-connection) below)
-
-## Setup Guide
-
-### Step 1: Authenticate with Azure CLI
-
-Make sure you're logged in with the account that has access to your Azure AI Foundry project:
-
-```powershell
-az login
-az account show # Verify the correct subscription is selected
-```
-
-### Step 2: Create the MCP Tool Connection
+> For common prerequisites and setup instructions, see the [Hosted Agent Samples README](../README.md).
-The agent uses a Foundry MCP tool connection to access Microsoft Learn documentation tools. You need to create this connection in your Azure AI Foundry project.
-
-1. Go to the [Azure AI Foundry portal](https://ai.azure.com)
-2. Navigate to your project
-3. Go to **Connected resources** → **+ New connection** → **Model Context Protocol tool**
-4. Fill in the following:
- - **Name**: `SampleMCPTool` (or any name you prefer)
- - **Remote MCP Server endpoint**: `https://learn.microsoft.com/api/mcp`
- - **Authentication**: `Unauthenticated`
-5. Click **Connect**
-
-The connection **name** you chose (e.g., `SampleMCPTool`) is the value you'll use for `MCP_TOOL_CONNECTION_ID`.
-
-### Step 3: Assign the Azure AI Developer Role
-
-The `UseFoundryTools` extension requires the `Microsoft.CognitiveServices/accounts/AIServices/agents/write` data action to resolve and invoke MCP tools. This is included in the **Azure AI Developer** role.
-
-Even if you created the Foundry project, you may not have this role by default. To assign it:
-
-```powershell
-# Replace with your user email and resource path
-az role assignment create `
- --role "Azure AI Developer" `
- --assignee "your-email@microsoft.com" `
- --scope "/subscriptions/{subscription-id}/resourceGroups/{resource-group}/providers/Microsoft.CognitiveServices/accounts/{account-name}"
-```
+## Prerequisites
-> **Note**: You need **Owner** or **User Access Administrator** permissions on the resource to assign roles. If you don't have this, you may need to request JIT (Just-In-Time) elevated access via [Azure PIM](https://portal.azure.com/#view/Microsoft_Azure_PIMCommon/ActivationMenuBlade/~/aadmigratedresource) first.
+In addition to the common prerequisites:
-For more details on permissions, see [Azure AI Foundry Permissions](https://aka.ms/FoundryPermissions).
+1. An **Azure AI Foundry project** with a chat model deployed (e.g., `gpt-5.2`, `gpt-4o-mini`)
+2. The **Azure AI Developer** role assigned on the Foundry resource (includes the `agents/write` data action required by `UseFoundryTools`)
+3. An **MCP tool connection** configured in your Foundry project pointing to `https://learn.microsoft.com/api/mcp`
-### Step 4: Set Environment Variables
+## Environment Variables
-The sample requires `AZURE_OPENAI_ENDPOINT` and `AZURE_AI_PROJECT_ENDPOINT`. The `UseFoundryTools` extension internally uses `AZURE_AI_PROJECT_ENDPOINT` to resolve tool connections.
+In addition to the common environment variables in the root README:
```powershell
-# Your Azure OpenAI endpoint
-$env:AZURE_OPENAI_ENDPOINT="https://your-openai-resource.openai.azure.com/"
-
# Your Azure AI Foundry project endpoint (required by UseFoundryTools)
$env:AZURE_AI_PROJECT_ENDPOINT="https://your-resource.services.ai.azure.com/api/projects/your-project"
@@ -81,25 +34,6 @@ $env:AZURE_OPENAI_DEPLOYMENT_NAME="gpt-5.2"
$env:MCP_TOOL_CONNECTION_ID="SampleMCPTool"
```
-> **Important**: `MCP_TOOL_CONNECTION_ID` should be the connection **name** only (e.g., `SampleMCPTool`), not the full ARM resource path.
-
-## Running the Sample
-
-```powershell
-dotnet run
-```
-
-This starts the hosted agent locally on `http://localhost:8088/`.
-
-### Interacting with the Agent
-
-You can use the `run-requests.http` file in this directory, or send requests directly:
-
-```powershell
-$body = @{ input = "Search for Azure AI Agent Service documentation" } | ConvertTo-Json
-Invoke-RestMethod -Uri "http://localhost:8088/responses" -Method Post -Body $body -ContentType "application/json"
-```
-
## How It Works
1. An `AzureOpenAIClient` is created with `AzureCliCredential` and used to get a chat client
@@ -109,17 +43,3 @@ Invoke-RestMethod -Uri "http://localhost:8088/responses" -Method Post -Body $bod
3. `UseFoundryTools` resolves the connection using `AZURE_AI_PROJECT_ENDPOINT` internally
4. A `ChatClientAgent` is created with instructions guiding it to use the MCP tools for documentation queries
5. The agent is hosted using `RunAIAgentAsync` which exposes the OpenAI Responses-compatible API endpoint
-
-## Troubleshooting
-
-### `PermissionDenied` — lacks `agents/write` data action
-
-Assign the **Azure AI Developer** role to your user on the Cognitive Services resource. See [Step 3](#step-3-assign-the-azure-ai-developer-role).
-
-### `Project connection ... was not found`
-
-Make sure `MCP_TOOL_CONNECTION_ID` contains only the connection **name** (e.g., `SampleMCPTool`), not the full ARM resource ID path.
-
-### `AZURE_AI_PROJECT_ENDPOINT must be set`
-
-The `UseFoundryTools` extension requires `AZURE_AI_PROJECT_ENDPOINT` to be set, even though `Program.cs` reads `AZURE_OPENAI_ENDPOINT`. Both must be configured. See [Step 4](#step-4-set-environment-variables).
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentsInWorkflows/README.md b/dotnet/samples/05-end-to-end/HostedAgents/AgentsInWorkflows/README.md
index 5f6babc755..72019bbf22 100644
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentsInWorkflows/README.md
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentsInWorkflows/README.md
@@ -9,6 +9,8 @@ This workflow uses three translation agents:
The agents are connected sequentially, creating a translation chain that demonstrates how AI-powered components can be seamlessly integrated into workflow pipelines.
+> For common prerequisites and setup instructions, see the [Hosted Agent Samples README](../README.md).
+
## Prerequisites
Before you begin, ensure you have the following prerequisites:
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/README.md b/dotnet/samples/05-end-to-end/HostedAgents/README.md
new file mode 100644
index 0000000000..a45539f41c
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/README.md
@@ -0,0 +1,125 @@
+# Hosted Agent Samples
+
+These samples demonstrate how to build and host AI agents using the [Azure AI AgentServer SDK](https://learn.microsoft.com/en-us/dotnet/api/overview/azure/ai.agentserver.agentframework-readme). Each sample can be run locally and deployed to Microsoft Foundry as a hosted agent.
+
+## Samples
+
+| Sample | Description |
+|--------|-------------|
+| [`AgentWithTools`](./AgentWithTools/) | Foundry tools (MCP + code interpreter) via `UseFoundryTools` |
+| [`AgentWithLocalTools`](./AgentWithLocalTools/) | Local C# function tool execution (Seattle hotel search) |
+| [`AgentThreadAndHITL`](./AgentThreadAndHITL/) | Human-in-the-loop with `ApprovalRequiredAIFunction` and thread persistence |
+| [`AgentWithHostedMCP`](./AgentWithHostedMCP/) | Hosted MCP server tool (Microsoft Learn search) |
+| [`AgentWithTextSearchRag`](./AgentWithTextSearchRag/) | RAG with `TextSearchProvider` (Contoso Outdoors) |
+| [`AgentsInWorkflows`](./AgentsInWorkflows/) | Sequential workflow pipeline (translation chain) |
+
+## Common Prerequisites
+
+Before running any sample, ensure you have:
+
+1. **.NET 10 SDK** or later — [Download](https://dotnet.microsoft.com/download/dotnet/10.0)
+2. **Azure CLI** installed — [Install guide](https://learn.microsoft.com/cli/azure/install-azure-cli)
+3. **Azure OpenAI** or **Azure AI Foundry project** with a chat model deployed (e.g., `gpt-5.2`, `gpt-4o-mini`)
+
+### Authenticate with Azure CLI
+
+All samples use `AzureCliCredential` for authentication. Make sure you're logged in:
+
+```powershell
+az login
+az account show # Verify the correct subscription
+```
+
+### Common Environment Variables
+
+Most samples require one or more of these environment variables:
+
+| Variable | Used By | Description |
+|----------|---------|-------------|
+| `AZURE_OPENAI_ENDPOINT` | Most samples | Your Azure OpenAI resource endpoint URL |
+| `AZURE_OPENAI_DEPLOYMENT_NAME` | Most samples | Chat model deployment name (defaults to `gpt-4o-mini`) |
+| `AZURE_AI_PROJECT_ENDPOINT` | AgentWithTools, AgentWithLocalTools | Azure AI Foundry project endpoint |
+| `MCP_TOOL_CONNECTION_ID` | AgentWithTools | Foundry MCP tool connection name |
+| `MODEL_DEPLOYMENT_NAME` | AgentWithLocalTools | Chat model deployment name (defaults to `gpt-4.1-mini`) |
+
+See each sample's README for the specific variables required.
+
+## Azure AI Foundry Setup (for samples that use Foundry)
+
+Some samples (`AgentWithTools`, `AgentWithLocalTools`) connect to an Azure AI Foundry project. If you're using these samples, you'll need additional setup.
+
+### Azure AI Developer Role
+
+The `UseFoundryTools` extension requires the **Azure AI Developer** role on the Cognitive Services resource. Even if you created the project, you may not have this role by default.
+
+```powershell
+az role assignment create `
+ --role "Azure AI Developer" `
+ --assignee "your-email@microsoft.com" `
+ --scope "/subscriptions/{subscription-id}/resourceGroups/{resource-group}/providers/Microsoft.CognitiveServices/accounts/{account-name}"
+```
+
+> **Note**: You need **Owner** or **User Access Administrator** permissions on the resource to assign roles. If you don't have this, you may need to request JIT (Just-In-Time) elevated access via [Azure PIM](https://portal.azure.com/#view/Microsoft_Azure_PIMCommon/ActivationMenuBlade/~/aadmigratedresource).
+
+For more details on permissions, see [Azure AI Foundry Permissions](https://aka.ms/FoundryPermissions).
+
+### Creating an MCP Tool Connection
+
+The `AgentWithTools` sample requires an MCP tool connection configured in your Foundry project:
+
+1. Go to the [Azure AI Foundry portal](https://ai.azure.com)
+2. Navigate to your project
+3. Go to **Connected resources** → **+ New connection** → **Model Context Protocol tool**
+4. Fill in:
+ - **Name**: `SampleMCPTool` (or any name you prefer)
+ - **Remote MCP Server endpoint**: `https://learn.microsoft.com/api/mcp`
+ - **Authentication**: `Unauthenticated`
+5. Click **Connect**
+
+The connection **name** (e.g., `SampleMCPTool`) is used as the `MCP_TOOL_CONNECTION_ID` environment variable.
+
+> **Important**: Use only the connection **name**, not the full ARM resource ID.
+
+## Running a Sample
+
+Each sample runs as a standalone hosted agent on `http://localhost:8088/`:
+
+```powershell
+cd <sample-directory>   # e.g. cd AgentWithTools
+dotnet run
+```
+
+### Interacting with the Agent
+
+Each sample includes a `run-requests.http` file for testing with the [VS Code REST Client](https://marketplace.visualstudio.com/items?itemName=humao.rest-client) extension, or you can use PowerShell:
+
+```powershell
+$body = @{ input = "Your question here" } | ConvertTo-Json
+Invoke-RestMethod -Uri "http://localhost:8088/responses" -Method Post -Body $body -ContentType "application/json"
+```
+
+## Deploying to Microsoft Foundry
+
+Each sample includes a `Dockerfile` and `agent.yaml` for deployment. To deploy your agent to Microsoft Foundry, follow the [hosted agents deployment guide](https://learn.microsoft.com/en-us/azure/ai-foundry/agents/concepts/hosted-agents).
+
+## Troubleshooting
+
+### `PermissionDenied` — lacks `agents/write` data action
+
+Assign the **Azure AI Developer** role to your user. See [Azure AI Developer Role](#azure-ai-developer-role) above.
+
+### `Project connection ... was not found`
+
+Make sure `MCP_TOOL_CONNECTION_ID` contains only the connection **name** (e.g., `SampleMCPTool`), not the full ARM resource ID path.
+
+### `AZURE_AI_PROJECT_ENDPOINT must be set`
+
+The `UseFoundryTools` extension requires `AZURE_AI_PROJECT_ENDPOINT`. Set it to your Foundry project endpoint (e.g., `https://your-resource.services.ai.azure.com/api/projects/your-project`).
+
+### Multi-framework error when running `dotnet run`
+
+If you see "Your project targets multiple frameworks", specify the framework:
+
+```powershell
+dotnet run --framework net10.0
+```
From f56897e1016a8a08f18037ae5d295c85e5864af3 Mon Sep 17 00:00:00 2001
From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com>
Date: Thu, 26 Feb 2026 16:47:06 +0000
Subject: [PATCH 3/6] Fix dotnet format issues in
AgentWithLocalTools/Program.cs
- Add UTF-8 BOM (CHARSET)
- Sort System.ClientModel.Primitives import alphabetically (IMPORTS)
- Use target-typed new for AIProjectClient (IDE0090)
- Add internal accessibility modifier to Hotel record (IDE0040)
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
---
.../HostedAgents/AgentWithLocalTools/Program.cs | 8 ++++----
.../05-end-to-end/HostedAgents/AgentWithTools/Program.cs | 4 ++--
2 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/Program.cs b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/Program.cs
index 35a875eb8d..75081a5fd1 100644
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/Program.cs
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/Program.cs
@@ -1,13 +1,13 @@
-// Copyright (c) Microsoft. All rights reserved.
+// Copyright (c) Microsoft. All rights reserved.
// Seattle Hotel Agent - A simple agent with a tool to find hotels in Seattle.
// Uses Microsoft Agent Framework with Azure AI Foundry.
// Ready for deployment to Foundry Hosted Agent service.
+using System.ClientModel.Primitives;
using System.ComponentModel;
using System.Globalization;
using System.Text;
-using System.ClientModel.Primitives;
using Azure.AI.AgentServer.AgentFramework.Extensions;
using Azure.AI.OpenAI;
using Azure.AI.Projects;
@@ -85,7 +85,7 @@ string GetAvailableHotels(
}
var credential = new AzureCliCredential();
-AIProjectClient projectClient = new AIProjectClient(new Uri(endpoint), credential);
+AIProjectClient projectClient = new(new Uri(endpoint), credential);
ClientConnection connection = projectClient.GetConnection(typeof(AzureOpenAIClient).FullName!);
@@ -126,4 +126,4 @@ politely let them know you specialize in Seattle hotel recommendations.
Console.WriteLine("Seattle Hotel Agent Server running on http://localhost:8088");
await agent.RunAIAgentAsync(telemetrySourceName: "Agents");
-sealed record Hotel(string Name, int PricePerNight, double Rating, string Location);
+internal sealed record Hotel(string Name, int PricePerNight, double Rating, string Location);
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/Program.cs b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/Program.cs
index 59892eb670..3bb68d6e31 100644
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/Program.cs
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/Program.cs
@@ -4,10 +4,10 @@
// with an AI agent hosted using the Azure AI AgentServer SDK.
using Azure.AI.AgentServer.AgentFramework.Extensions;
-using Microsoft.Agents.AI;
-using Microsoft.Extensions.AI;
using Azure.AI.OpenAI;
using Azure.Identity;
+using Microsoft.Agents.AI;
+using Microsoft.Extensions.AI;
var openAiEndpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
From 1585ff5e4866734ffc2e3f898674ddcffe88f82f Mon Sep 17 00:00:00 2001
From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com>
Date: Thu, 26 Feb 2026 16:53:01 +0000
Subject: [PATCH 4/6] Address PR review: align model names and package versions
- Change default model from gpt-4.1-mini to gpt-4o-mini in AgentWithLocalTools
(Program.cs, agent.yaml, README.md) to match existing samples
- Change README example from gpt-5.2 to gpt-4o-mini in AgentWithTools and root README
- Align AgentWithLocalTools package versions with other samples:
Azure.AI.AgentServer.AgentFramework beta.6 -> beta.8
Azure.AI.OpenAI 2.8.0-beta.1 -> 2.7.0-beta.2
Microsoft.Extensions.AI.OpenAI 10.2.0-preview -> 10.1.1-preview
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
---
.../AgentWithLocalTools/AgentWithLocalTools.csproj | 6 +++---
.../HostedAgents/AgentWithLocalTools/Program.cs | 2 +-
.../HostedAgents/AgentWithLocalTools/README.md | 6 +++---
.../HostedAgents/AgentWithLocalTools/agent.yaml | 4 ++--
.../05-end-to-end/HostedAgents/AgentWithTools/README.md | 2 +-
dotnet/samples/05-end-to-end/HostedAgents/README.md | 4 ++--
6 files changed, 12 insertions(+), 12 deletions(-)
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/AgentWithLocalTools.csproj b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/AgentWithLocalTools.csproj
index ee737683ac..d541528208 100644
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/AgentWithLocalTools.csproj
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/AgentWithLocalTools.csproj
@@ -36,11 +36,11 @@
-
+
-
+
-
+
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/Program.cs b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/Program.cs
index 75081a5fd1..72eb938047 100644
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/Program.cs
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/Program.cs
@@ -17,7 +17,7 @@
var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT")
?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
-var deploymentName = Environment.GetEnvironmentVariable("MODEL_DEPLOYMENT_NAME") ?? "gpt-4.1-mini";
+var deploymentName = Environment.GetEnvironmentVariable("MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
Console.WriteLine($"Project Endpoint: {endpoint}");
Console.WriteLine($"Model Deployment: {deploymentName}");
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/README.md b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/README.md
index ee504dc1f4..c080331a87 100644
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/README.md
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/README.md
@@ -15,7 +15,7 @@ Key features:
Before running this sample, ensure you have:
1. .NET 10 SDK installed
-2. An Azure AI Foundry Project with a chat model deployed (e.g., gpt-4.1-mini)
+2. An Azure AI Foundry Project with a chat model deployed (e.g., gpt-4o-mini)
3. Azure CLI installed and authenticated (`az login`)
## Environment Variables
@@ -26,8 +26,8 @@ Set the following environment variables:
# Replace with your Azure AI Foundry project endpoint
$env:AZURE_AI_PROJECT_ENDPOINT="https://your-project.services.ai.azure.com/api/projects/your-project-name"
-# Optional, defaults to gpt-4.1-mini
-$env:MODEL_DEPLOYMENT_NAME="gpt-4.1-mini"
+# Optional, defaults to gpt-4o-mini
+$env:MODEL_DEPLOYMENT_NAME="gpt-4o-mini"
```
## How It Works
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/agent.yaml b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/agent.yaml
index 3f51fc842f..e60d9ccadf 100644
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/agent.yaml
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/agent.yaml
@@ -22,8 +22,8 @@ template:
- name: AZURE_AI_PROJECT_ENDPOINT
value: ${AZURE_AI_PROJECT_ENDPOINT}
- name: MODEL_DEPLOYMENT_NAME
- value: gpt-4.1-mini
+ value: gpt-4o-mini
resources:
- kind: model
- id: gpt-4.1-mini
+ id: gpt-4o-mini
name: chat
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/README.md b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/README.md
index a456b24183..5a80ecda9f 100644
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/README.md
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/README.md
@@ -28,7 +28,7 @@ In addition to the common environment variables in the root README:
$env:AZURE_AI_PROJECT_ENDPOINT="https://your-resource.services.ai.azure.com/api/projects/your-project"
# Chat model deployment name (defaults to gpt-4o-mini if not set)
-$env:AZURE_OPENAI_DEPLOYMENT_NAME="gpt-5.2"
+$env:AZURE_OPENAI_DEPLOYMENT_NAME="gpt-4o-mini"
# The MCP tool connection name (just the name, not the full ARM resource ID)
$env:MCP_TOOL_CONNECTION_ID="SampleMCPTool"
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/README.md b/dotnet/samples/05-end-to-end/HostedAgents/README.md
index a45539f41c..f7a3bdc94b 100644
--- a/dotnet/samples/05-end-to-end/HostedAgents/README.md
+++ b/dotnet/samples/05-end-to-end/HostedAgents/README.md
@@ -19,7 +19,7 @@ Before running any sample, ensure you have:
1. **.NET 10 SDK** or later — [Download](https://dotnet.microsoft.com/download/dotnet/10.0)
2. **Azure CLI** installed — [Install guide](https://learn.microsoft.com/cli/azure/install-azure-cli)
-3. **Azure OpenAI** or **Azure AI Foundry project** with a chat model deployed (e.g., `gpt-5.2`, `gpt-4o-mini`)
+3. **Azure OpenAI** or **Azure AI Foundry project** with a chat model deployed (e.g., `gpt-4o-mini`)
### Authenticate with Azure CLI
@@ -40,7 +40,7 @@ Most samples require one or more of these environment variables:
| `AZURE_OPENAI_DEPLOYMENT_NAME` | Most samples | Chat model deployment name (defaults to `gpt-4o-mini`) |
| `AZURE_AI_PROJECT_ENDPOINT` | AgentWithTools, AgentWithLocalTools | Azure AI Foundry project endpoint |
| `MCP_TOOL_CONNECTION_ID` | AgentWithTools | Foundry MCP tool connection name |
-| `MODEL_DEPLOYMENT_NAME` | AgentWithLocalTools | Chat model deployment name (defaults to `gpt-4.1-mini`) |
+| `MODEL_DEPLOYMENT_NAME` | AgentWithLocalTools | Chat model deployment name (defaults to `gpt-4o-mini`) |
See each sample's README for the specific variables required.
From f171e68e66d2c6d53c8423d053b5ee5962cf8343 Mon Sep 17 00:00:00 2001
From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com>
Date: Thu, 26 Feb 2026 16:56:23 +0000
Subject: [PATCH 5/6] Upgrade new samples to latest package versions
- Azure.AI.OpenAI: 2.7.0-beta.2 -> 2.8.0-beta.1
- Microsoft.Extensions.AI.OpenAI: 10.1.1-preview -> 10.3.0
Aligns with AgentWithHostedMCP which uses the latest versions.
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
---
.../HostedAgents/AgentThreadAndHITL/AgentThreadAndHITL.csproj | 4 ++--
.../AgentWithLocalTools/AgentWithLocalTools.csproj | 4 ++--
.../HostedAgents/AgentWithTools/AgentWithTools.csproj | 4 ++--
3 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/AgentThreadAndHITL.csproj b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/AgentThreadAndHITL.csproj
index 1dee424664..4cc630e4ef 100644
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/AgentThreadAndHITL.csproj
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/AgentThreadAndHITL.csproj
@@ -37,10 +37,10 @@
-
+
-
+
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/AgentWithLocalTools.csproj b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/AgentWithLocalTools.csproj
index d541528208..43cdbfb025 100644
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/AgentWithLocalTools.csproj
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/AgentWithLocalTools.csproj
@@ -38,9 +38,9 @@
-
+
-
+
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/AgentWithTools.csproj b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/AgentWithTools.csproj
index e1e30873e5..ce8a739757 100644
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/AgentWithTools.csproj
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/AgentWithTools.csproj
@@ -36,10 +36,10 @@
-
+
-
+
From 794a2ec340124d740407d3ca484c99d0b92a137c Mon Sep 17 00:00:00 2001
From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com>
Date: Thu, 26 Feb 2026 17:07:33 +0000
Subject: [PATCH 6/6] Pin AgentThreadAndHITL to Microsoft.Extensions.AI.OpenAI
10.1.1
Azure.AI.AgentServer.AgentFramework beta.8 was compiled against
Microsoft.Extensions.AI.Abstractions with the single-param
FunctionApprovalRequestContent.CreateResponse(bool). Version 10.3.0
changed the signature to include an optional reason parameter, causing
a binary incompatibility at runtime. Pin to 10.1.1 until the framework
is recompiled against the newer abstractions.
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
---
.../HostedAgents/AgentThreadAndHITL/AgentThreadAndHITL.csproj | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/AgentThreadAndHITL.csproj b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/AgentThreadAndHITL.csproj
index 4cc630e4ef..17b90fd6e2 100644
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/AgentThreadAndHITL.csproj
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/AgentThreadAndHITL.csproj
@@ -40,7 +40,7 @@
-
+