diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/AgentThreadAndHITL.csproj b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/AgentThreadAndHITL.csproj
new file mode 100644
index 0000000000..17b90fd6e2
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/AgentThreadAndHITL.csproj
@@ -0,0 +1,70 @@
+
+
+
+ Exe
+ net10.0
+
+ enable
+ enable
+ $(NoWarn);MEAI001
+
+
+ false
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+
+
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/Dockerfile b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/Dockerfile
new file mode 100644
index 0000000000..004bd49fa8
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/Dockerfile
@@ -0,0 +1,20 @@
+# Build the application
+FROM mcr.microsoft.com/dotnet/sdk:10.0-alpine AS build
+WORKDIR /src
+
+# Copy files from the current directory on the host to the working directory in the container
+COPY . .
+
+RUN dotnet restore
+RUN dotnet build -c Release --no-restore
+RUN dotnet publish -c Release --no-build -o /app -f net10.0
+
+# Run the application
+FROM mcr.microsoft.com/dotnet/aspnet:10.0-alpine AS final
+WORKDIR /app
+
+# Copy everything needed to run the app from the "build" stage.
+COPY --from=build /app .
+
+EXPOSE 8088
+ENTRYPOINT ["dotnet", "AgentThreadAndHITL.dll"]
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/Program.cs b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/Program.cs
new file mode 100644
index 0000000000..305b9835ed
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/Program.cs
@@ -0,0 +1,38 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+// This sample demonstrates Human-in-the-Loop (HITL) capabilities with thread persistence.
+// The agent wraps function tools with ApprovalRequiredAIFunction to require user approval
+// before invoking them. Users respond with 'approve' or 'reject' when prompted.
+
+using System.ComponentModel;
+using Azure.AI.AgentServer.AgentFramework.Extensions;
+using Azure.AI.AgentServer.AgentFramework.Persistence;
+using Azure.AI.OpenAI;
+using Azure.Identity;
+using Microsoft.Agents.AI;
+using Microsoft.Extensions.AI;
+
+var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
+var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+
+[Description("Get the weather for a given location.")]
+static string GetWeather([Description("The location to get the weather for.")] string location)
+ => $"The weather in {location} is cloudy with a high of 15°C.";
+
+// Create the chat client and agent.
+// Note: ApprovalRequiredAIFunction wraps the tool to require user approval before invocation.
+// User should reply with 'approve' or 'reject' when prompted.
+#pragma warning disable MEAI001 // Type is for evaluation purposes only
+AIAgent agent = new AzureOpenAIClient(
+ new Uri(endpoint),
+ new AzureCliCredential())
+ .GetChatClient(deploymentName)
+ .AsIChatClient()
+ .CreateAIAgent(
+ instructions: "You are a helpful assistant",
+ tools: [new ApprovalRequiredAIFunction(AIFunctionFactory.Create(GetWeather))]
+ );
+#pragma warning restore MEAI001
+
+var threadRepository = new InMemoryAgentThreadRepository(agent);
+await agent.RunAIAgentAsync(telemetrySourceName: "Agents", threadRepository: threadRepository);
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/README.md b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/README.md
new file mode 100644
index 0000000000..f2d9a65103
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/README.md
@@ -0,0 +1,46 @@
+# What this sample demonstrates
+
+This sample demonstrates Human-in-the-Loop (HITL) capabilities with thread persistence. The agent wraps function tools with `ApprovalRequiredAIFunction` so that every tool invocation requires explicit user approval before execution. Thread state is maintained across requests using `InMemoryAgentThreadRepository`.
+
+Key features:
+- Requiring human approval before executing function calls
+- Persisting conversation threads across multiple requests
+- Approving or rejecting tool invocations at runtime
+
+> For common prerequisites and setup instructions, see the [Hosted Agent Samples README](../README.md).
+
+## Prerequisites
+
+Before running this sample, ensure you have:
+
+1. .NET 10 SDK installed
+2. An Azure OpenAI endpoint configured
+3. A deployment of a chat model (e.g., gpt-4o-mini)
+4. Azure CLI installed and authenticated (`az login`)
+
+## Environment Variables
+
+Set the following environment variables:
+
+```powershell
+# Replace with your Azure OpenAI endpoint
+$env:AZURE_OPENAI_ENDPOINT="https://your-openai-resource.openai.azure.com/"
+
+# Optional, defaults to gpt-4o-mini
+$env:AZURE_OPENAI_DEPLOYMENT_NAME="gpt-4o-mini"
+```
+
+## How It Works
+
+The sample uses `ApprovalRequiredAIFunction` to wrap standard AI function tools. When the model decides to call a tool, the wrapper intercepts the invocation and returns a HITL approval request to the caller instead of executing the function immediately.
+
+1. The user sends a message (e.g., "What is the weather in Vancouver?")
+2. The model determines a function call is needed and selects the `GetWeather` tool
+3. `ApprovalRequiredAIFunction` intercepts the call and returns an approval request containing the function name and arguments
+4. The user responds with `approve` or `reject`
+5. If approved, the function executes and the model generates a response using the result
+6. If rejected, the model generates a response without the function result
+
+Thread persistence is handled by `InMemoryAgentThreadRepository`, which stores conversation history keyed by `conversation.id`. This means the HITL flow works across multiple HTTP requests as long as each request includes the same `conversation.id`.
+
+> **Note:** HITL requires a stable `conversation.id` in every request so the agent can correlate the approval response with the original function call. Use the `run-requests.http` file in this directory to test the full approval flow.
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/agent.yaml b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/agent.yaml
new file mode 100644
index 0000000000..aa78734283
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/agent.yaml
@@ -0,0 +1,28 @@
+name: AgentThreadAndHITL
+displayName: "Weather Assistant Agent"
+description: >
+ A Weather Assistant Agent that provides weather information and forecasts. It
+ demonstrates how to use Azure AI AgentServer with Human-in-the-Loop (HITL)
+ capabilities to get human approval for function calls.
+metadata:
+ authors:
+ - Microsoft Agent Framework Team
+ tags:
+ - Azure AI AgentServer
+ - Microsoft Agent Framework
+ - Human-in-the-Loop
+template:
+ kind: hosted
+ name: AgentThreadAndHITL
+ protocols:
+ - protocol: responses
+ version: v1
+ environment_variables:
+ - name: AZURE_OPENAI_ENDPOINT
+ value: ${AZURE_OPENAI_ENDPOINT}
+ - name: AZURE_OPENAI_DEPLOYMENT_NAME
+ value: gpt-4o-mini
+resources:
+ - name: "gpt-4o-mini"
+ kind: model
+ id: gpt-4o-mini
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/run-requests.http b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/run-requests.http
new file mode 100644
index 0000000000..196a30a542
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/run-requests.http
@@ -0,0 +1,70 @@
+@host = http://localhost:8088
+@endpoint = {{host}}/responses
+
+### Health Check
+GET {{host}}/readiness
+
+###
+# HITL (Human-in-the-Loop) Flow
+#
+# This sample requires a multi-turn conversation to demonstrate the approval flow:
+# 1. Send a request that triggers a tool call (e.g., asking about the weather)
+# 2. The agent responds with a function_call named "__hosted_agent_adapter_hitl__"
+# containing the call_id and the tool details
+# 3. Send a follow-up request with a function_call_output to approve or reject
+#
+# IMPORTANT: You must use the same conversation.id across all requests in a flow,
+# and update the call_id from step 2 into step 3.
+###
+
+### Step 1: Send initial request (triggers HITL approval)
+# @name initialRequest
+POST {{endpoint}}
+Content-Type: application/json
+
+{
+ "input": "What is the weather like in Vancouver?",
+ "stream": false,
+ "conversation": {
+ "id": "conv_test0000000000000000000000000000000000000000000000"
+ }
+}
+
+### Step 2: Approve the function call
+# Copy the call_id from the Step 1 response output and replace below.
+# The response will contain: "name": "__hosted_agent_adapter_hitl__" with a "call_id" value.
+POST {{endpoint}}
+Content-Type: application/json
+
+{
+ "input": [
+ {
+ "type": "function_call_output",
+ "call_id": "REPLACE_WITH_CALL_ID_FROM_STEP_1",
+ "output": "approve"
+ }
+ ],
+ "stream": false,
+ "conversation": {
+ "id": "conv_test0000000000000000000000000000000000000000000000"
+ }
+}
+
+### Step 3 (alternative): Reject the function call
+# Use this instead of Step 2 to deny the tool execution.
+POST {{endpoint}}
+Content-Type: application/json
+
+{
+ "input": [
+ {
+ "type": "function_call_output",
+ "call_id": "REPLACE_WITH_CALL_ID_FROM_STEP_1",
+ "output": "reject"
+ }
+ ],
+ "stream": false,
+ "conversation": {
+ "id": "conv_test0000000000000000000000000000000000000000000000"
+ }
+}
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithHostedMCP/README.md b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithHostedMCP/README.md
index a5648d7ac9..8d8ddba330 100644
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithHostedMCP/README.md
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithHostedMCP/README.md
@@ -8,6 +8,8 @@ Key features:
- Filtering available tools from an MCP server
- Using Azure OpenAI Responses with MCP tools
+> For common prerequisites and setup instructions, see the [Hosted Agent Samples README](../README.md).
+
## Prerequisites
Before running this sample, ensure you have:
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/.dockerignore b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/.dockerignore
new file mode 100644
index 0000000000..2afa2c2601
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/.dockerignore
@@ -0,0 +1,24 @@
+**/.dockerignore
+**/.env
+**/.git
+**/.gitignore
+**/.project
+**/.settings
+**/.toolstarget
+**/.vs
+**/.vscode
+**/*.*proj.user
+**/*.dbmdl
+**/*.jfm
+**/azds.yaml
+**/bin
+**/charts
+**/docker-compose*
+**/Dockerfile*
+**/node_modules
+**/npm-debug.log
+**/obj
+**/secrets.dev.yaml
+**/values.dev.yaml
+LICENSE
+README.md
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/AgentWithLocalTools.csproj b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/AgentWithLocalTools.csproj
new file mode 100644
index 0000000000..43cdbfb025
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/AgentWithLocalTools.csproj
@@ -0,0 +1,70 @@
+
+
+
+ Exe
+ net10.0
+
+ enable
+ enable
+ true
+
+
+ false
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+
+
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/Dockerfile b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/Dockerfile
new file mode 100644
index 0000000000..c2461965a4
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/Dockerfile
@@ -0,0 +1,20 @@
+# Build the application
+FROM mcr.microsoft.com/dotnet/sdk:10.0-alpine AS build
+WORKDIR /src
+
+# Copy files from the current directory on the host to the working directory in the container
+COPY . .
+
+RUN dotnet restore
+RUN dotnet build -c Release --no-restore
+RUN dotnet publish -c Release --no-build -o /app -f net10.0
+
+# Run the application
+FROM mcr.microsoft.com/dotnet/aspnet:10.0-alpine AS final
+WORKDIR /app
+
+# Copy everything needed to run the app from the "build" stage.
+COPY --from=build /app .
+
+EXPOSE 8088
+ENTRYPOINT ["dotnet", "AgentWithLocalTools.dll"]
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/Program.cs b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/Program.cs
new file mode 100644
index 0000000000..72eb938047
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/Program.cs
@@ -0,0 +1,129 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+// Seattle Hotel Agent - A simple agent with a tool to find hotels in Seattle.
+// Uses Microsoft Agent Framework with Azure AI Foundry.
+// Ready for deployment to Foundry Hosted Agent service.
+
+using System.ClientModel.Primitives;
+using System.ComponentModel;
+using System.Globalization;
+using System.Text;
+using Azure.AI.AgentServer.AgentFramework.Extensions;
+using Azure.AI.OpenAI;
+using Azure.AI.Projects;
+using Azure.Identity;
+using Microsoft.Agents.AI;
+using Microsoft.Extensions.AI;
+
+var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT")
+ ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
+var deploymentName = Environment.GetEnvironmentVariable("MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+Console.WriteLine($"Project Endpoint: {endpoint}");
+Console.WriteLine($"Model Deployment: {deploymentName}");
+
+var seattleHotels = new[]
+{
+ new Hotel("Contoso Suites", 189, 4.5, "Downtown"),
+ new Hotel("Fabrikam Residences", 159, 4.2, "Pike Place Market"),
+ new Hotel("Alpine Ski House", 249, 4.7, "Seattle Center"),
+ new Hotel("Margie's Travel Lodge", 219, 4.4, "Waterfront"),
+ new Hotel("Northwind Inn", 139, 4.0, "Capitol Hill"),
+ new Hotel("Relecloud Hotel", 99, 3.8, "University District"),
+};
+
+[Description("Get available hotels in Seattle for the specified dates. This simulates a call to a hotel availability API.")]
+string GetAvailableHotels(
+ [Description("Check-in date in YYYY-MM-DD format")] string checkInDate,
+ [Description("Check-out date in YYYY-MM-DD format")] string checkOutDate,
+ [Description("Maximum price per night in USD (optional, defaults to 500)")] int maxPrice = 500)
+{
+ try
+ {
+ if (!DateTime.TryParseExact(checkInDate, "yyyy-MM-dd", CultureInfo.InvariantCulture, DateTimeStyles.None, out var checkIn))
+ {
+ return "Error parsing check-in date. Please use YYYY-MM-DD format.";
+ }
+
+ if (!DateTime.TryParseExact(checkOutDate, "yyyy-MM-dd", CultureInfo.InvariantCulture, DateTimeStyles.None, out var checkOut))
+ {
+ return "Error parsing check-out date. Please use YYYY-MM-DD format.";
+ }
+
+ if (checkOut <= checkIn)
+ {
+ return "Error: Check-out date must be after check-in date.";
+ }
+
+ var nights = (checkOut - checkIn).Days;
+ var availableHotels = seattleHotels.Where(h => h.PricePerNight <= maxPrice).ToList();
+
+ if (availableHotels.Count == 0)
+ {
+ return $"No hotels found in Seattle within your budget of ${maxPrice}/night.";
+ }
+
+ var result = new StringBuilder();
+ result.AppendLine($"Available hotels in Seattle from {checkInDate} to {checkOutDate} ({nights} nights):");
+ result.AppendLine();
+
+ foreach (var hotel in availableHotels)
+ {
+ var totalCost = hotel.PricePerNight * nights;
+ result.AppendLine($"**{hotel.Name}**");
+ result.AppendLine($" Location: {hotel.Location}");
+ result.AppendLine($" Rating: {hotel.Rating}/5");
+ result.AppendLine($" ${hotel.PricePerNight}/night (Total: ${totalCost})");
+ result.AppendLine();
+ }
+
+ return result.ToString();
+ }
+ catch (Exception ex)
+ {
+ return $"Error processing request. Details: {ex.Message}";
+ }
+}
+
+var credential = new AzureCliCredential();
+AIProjectClient projectClient = new(new Uri(endpoint), credential);
+
+ClientConnection connection = projectClient.GetConnection(typeof(AzureOpenAIClient).FullName!);
+
+if (!connection.TryGetLocatorAsUri(out Uri? openAiEndpoint) || openAiEndpoint is null)
+{
+ throw new InvalidOperationException("Failed to get OpenAI endpoint from project connection.");
+}
+openAiEndpoint = new Uri($"https://{openAiEndpoint.Host}");
+Console.WriteLine($"OpenAI Endpoint: {openAiEndpoint}");
+
+var chatClient = new AzureOpenAIClient(openAiEndpoint, credential)
+ .GetChatClient(deploymentName)
+ .AsIChatClient()
+ .AsBuilder()
+ .UseOpenTelemetry(sourceName: "Agents", configure: cfg => cfg.EnableSensitiveData = false)
+ .Build();
+
+var agent = new ChatClientAgent(chatClient,
+ name: "SeattleHotelAgent",
+ instructions: """
+ You are a helpful travel assistant specializing in finding hotels in Seattle, Washington.
+
+ When a user asks about hotels in Seattle:
+ 1. Ask for their check-in and check-out dates if not provided
+ 2. Ask about their budget preferences if not mentioned
+ 3. Use the GetAvailableHotels tool to find available options
+ 4. Present the results in a friendly, informative way
+ 5. Offer to help with additional questions about the hotels or Seattle
+
+ Be conversational and helpful. If users ask about things outside of Seattle hotels,
+ politely let them know you specialize in Seattle hotel recommendations.
+ """,
+ tools: [AIFunctionFactory.Create(GetAvailableHotels)])
+ .AsBuilder()
+ .UseOpenTelemetry(sourceName: "Agents", configure: cfg => cfg.EnableSensitiveData = false)
+ .Build();
+
+Console.WriteLine("Seattle Hotel Agent Server running on http://localhost:8088");
+await agent.RunAIAgentAsync(telemetrySourceName: "Agents");
+
+internal sealed record Hotel(string Name, int PricePerNight, double Rating, string Location);
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/README.md b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/README.md
new file mode 100644
index 0000000000..c080331a87
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/README.md
@@ -0,0 +1,39 @@
+# What this sample demonstrates
+
+This sample demonstrates how to build a hosted agent that uses local C# function tools — a key advantage of code-based hosted agents over prompt agents. The agent acts as a Seattle travel assistant with a `GetAvailableHotels` tool that simulates querying a hotel availability API.
+
+Key features:
+- Defining local C# functions as agent tools using `AIFunctionFactory`
+- Using `AIProjectClient` to discover the OpenAI connection from the Azure AI Foundry project
+- Building a `ChatClientAgent` with custom instructions and tools
+- Deploying to the Foundry Hosted Agent service
+
+> For common prerequisites and setup instructions, see the [Hosted Agent Samples README](../README.md).
+
+## Prerequisites
+
+Before running this sample, ensure you have:
+
+1. .NET 10 SDK installed
+2. An Azure AI Foundry Project with a chat model deployed (e.g., gpt-4o-mini)
+3. Azure CLI installed and authenticated (`az login`)
+
+## Environment Variables
+
+Set the following environment variables:
+
+```powershell
+# Replace with your Azure AI Foundry project endpoint
+$env:AZURE_AI_PROJECT_ENDPOINT="https://your-project.services.ai.azure.com/api/projects/your-project-name"
+
+# Optional, defaults to gpt-4o-mini
+$env:MODEL_DEPLOYMENT_NAME="gpt-4o-mini"
+```
+
+## How It Works
+
+1. The agent uses `AIProjectClient` to discover the Azure OpenAI connection from the project endpoint
+2. A local C# function `GetAvailableHotels` is registered as a tool using `AIFunctionFactory.Create`
+3. When users ask about hotels, the model invokes the local tool to search simulated hotel data
+4. The tool filters hotels by price and calculates total costs based on the requested dates
+5. Results are returned to the model, which presents them in a conversational format
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/agent.yaml b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/agent.yaml
new file mode 100644
index 0000000000..e60d9ccadf
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/agent.yaml
@@ -0,0 +1,29 @@
+name: seattle-hotel-agent
+description: >
+ A travel assistant agent that helps users find hotels in Seattle.
+ Demonstrates local C# tool execution - a key advantage of code-based
+ hosted agents over prompt agents.
+metadata:
+ authors:
+ - Microsoft
+ tags:
+ - Azure AI AgentServer
+ - Microsoft Agent Framework
+ - Local Tools
+ - Travel Assistant
+ - Hotel Search
+template:
+ name: seattle-hotel-agent
+ kind: hosted
+ protocols:
+ - protocol: responses
+ version: v1
+ environment_variables:
+ - name: AZURE_AI_PROJECT_ENDPOINT
+ value: ${AZURE_AI_PROJECT_ENDPOINT}
+ - name: MODEL_DEPLOYMENT_NAME
+ value: gpt-4o-mini
+resources:
+ - kind: model
+ id: gpt-4o-mini
+ name: chat
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/run-requests.http b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/run-requests.http
new file mode 100644
index 0000000000..4f2e87e097
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/run-requests.http
@@ -0,0 +1,52 @@
+@host = http://localhost:8088
+@endpoint = {{host}}/responses
+
+### Health Check
+GET {{host}}/readiness
+
+### Simple hotel search - budget under $200
+POST {{endpoint}}
+Content-Type: application/json
+
+{
+ "input": "I need a hotel in Seattle from 2025-03-15 to 2025-03-18, budget under $200 per night",
+ "stream": false
+}
+
+### Hotel search with higher budget
+POST {{endpoint}}
+Content-Type: application/json
+
+{
+ "input": "Find me hotels in Seattle for March 20-23, 2025 under $250 per night",
+ "stream": false
+}
+
+### Ask for recommendations without dates (agent should ask for clarification)
+POST {{endpoint}}
+Content-Type: application/json
+
+{
+ "input": "What hotels do you recommend in Seattle?",
+ "stream": false
+}
+
+### Explicit input format
+POST {{endpoint}}
+Content-Type: application/json
+
+{
+ "input": [
+ {
+ "type": "message",
+ "role": "user",
+ "content": [
+ {
+ "type": "input_text",
+ "text": "I'm looking for a hotel in Seattle from 2025-04-01 to 2025-04-05, my budget is $150 per night maximum"
+ }
+ ]
+ }
+ ],
+ "stream": false
+}
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTextSearchRag/README.md b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTextSearchRag/README.md
index 614597bed9..396bc1bc9b 100644
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTextSearchRag/README.md
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTextSearchRag/README.md
@@ -8,6 +8,8 @@ Key features:
- Managing conversation memory with a rolling window approach
- Citing source documents in AI responses
+> For common prerequisites and setup instructions, see the [Hosted Agent Samples README](../README.md).
+
## Prerequisites
Before running this sample, ensure you have:
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/AgentWithTools.csproj b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/AgentWithTools.csproj
new file mode 100644
index 0000000000..ce8a739757
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/AgentWithTools.csproj
@@ -0,0 +1,69 @@
+
+
+
+ Exe
+ net10.0
+
+ enable
+ enable
+
+
+ false
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+
+
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/Dockerfile b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/Dockerfile
new file mode 100644
index 0000000000..c9f39f9574
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/Dockerfile
@@ -0,0 +1,20 @@
+# Build the application
+FROM mcr.microsoft.com/dotnet/sdk:10.0-alpine AS build
+WORKDIR /src
+
+# Copy files from the current directory on the host to the working directory in the container
+COPY . .
+
+RUN dotnet restore
+RUN dotnet build -c Release --no-restore
+RUN dotnet publish -c Release --no-build -o /app -f net10.0
+
+# Run the application
+FROM mcr.microsoft.com/dotnet/aspnet:10.0-alpine AS final
+WORKDIR /app
+
+# Copy everything needed to run the app from the "build" stage.
+COPY --from=build /app .
+
+EXPOSE 8088
+ENTRYPOINT ["dotnet", "AgentWithTools.dll"]
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/Program.cs b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/Program.cs
new file mode 100644
index 0000000000..3bb68d6e31
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/Program.cs
@@ -0,0 +1,43 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+// This sample demonstrates how to use Foundry tools (MCP and code interpreter)
+// with an AI agent hosted using the Azure AI AgentServer SDK.
+
+using Azure.AI.AgentServer.AgentFramework.Extensions;
+using Azure.AI.OpenAI;
+using Azure.Identity;
+using Microsoft.Agents.AI;
+using Microsoft.Extensions.AI;
+
+var openAiEndpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
+var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+var toolConnectionId = Environment.GetEnvironmentVariable("MCP_TOOL_CONNECTION_ID") ?? throw new InvalidOperationException("MCP_TOOL_CONNECTION_ID is not set.");
+
+var credential = new AzureCliCredential();
+
+var chatClient = new AzureOpenAIClient(new Uri(openAiEndpoint), credential)
+ .GetChatClient(deploymentName)
+ .AsIChatClient()
+ .AsBuilder()
+ .UseFoundryTools(new { type = "mcp", project_connection_id = toolConnectionId }, new { type = "code_interpreter" })
+ .UseOpenTelemetry(sourceName: "Agents", configure: (cfg) => cfg.EnableSensitiveData = true)
+ .Build();
+
+var agent = new ChatClientAgent(chatClient,
+ name: "AgentWithTools",
+ instructions: @"You are a helpful assistant with access to tools for fetching Microsoft documentation.
+
+ IMPORTANT: When the user asks about Microsoft Learn articles or documentation:
+ 1. You MUST use the microsoft_docs_fetch tool to retrieve the actual content
+ 2. Do NOT rely on your training data
+ 3. Always fetch the latest information from the provided URL
+
+ Available tools:
+ - microsoft_docs_fetch: Fetches and converts Microsoft Learn documentation
+ - microsoft_docs_search: Searches Microsoft/Azure documentation
+ - microsoft_code_sample_search: Searches for code examples")
+ .AsBuilder()
+ .UseOpenTelemetry(sourceName: "Agents", configure: (cfg) => cfg.EnableSensitiveData = true)
+ .Build();
+
+await agent.RunAIAgentAsync(telemetrySourceName: "Agents");
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/README.md b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/README.md
new file mode 100644
index 0000000000..5a80ecda9f
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/README.md
@@ -0,0 +1,45 @@
+# What this sample demonstrates
+
+This sample demonstrates how to use Foundry tools with an AI agent via the `UseFoundryTools` extension. The agent is configured with two tool types: an MCP (Model Context Protocol) connection for fetching Microsoft Learn documentation and a code interpreter for running code when needed.
+
+Key features:
+
+- Configuring Foundry tools using `UseFoundryTools` with MCP and code interpreter
+- Connecting to an external MCP tool via a Foundry project connection
+- Using `AzureCliCredential` for Azure authentication
+- OpenTelemetry instrumentation for both the chat client and agent
+
+> For common prerequisites and setup instructions, see the [Hosted Agent Samples README](../README.md).
+
+## Prerequisites
+
+In addition to the common prerequisites:
+
+1. An **Azure AI Foundry project** with a chat model deployed (e.g., `gpt-5.2`, `gpt-4o-mini`)
+2. The **Azure AI Developer** role assigned on the Foundry resource (includes the `agents/write` data action required by `UseFoundryTools`)
+3. An **MCP tool connection** configured in your Foundry project pointing to `https://learn.microsoft.com/api/mcp`
+
+## Environment Variables
+
+In addition to the common environment variables in the root README:
+
+```powershell
+# Your Azure AI Foundry project endpoint (required by UseFoundryTools)
+$env:AZURE_AI_PROJECT_ENDPOINT="https://your-resource.services.ai.azure.com/api/projects/your-project"
+
+# Chat model deployment name (defaults to gpt-4o-mini if not set)
+$env:AZURE_OPENAI_DEPLOYMENT_NAME="gpt-4o-mini"
+
+# The MCP tool connection name (just the name, not the full ARM resource ID)
+$env:MCP_TOOL_CONNECTION_ID="SampleMCPTool"
+```
+
+## How It Works
+
+1. An `AzureOpenAIClient` is created with `AzureCliCredential` and used to get a chat client
+2. The chat client is wrapped with `UseFoundryTools` which registers two Foundry tool types:
+ - **MCP connection**: Connects to an external MCP server (Microsoft Learn) via the project connection name, providing documentation fetch and search capabilities
+ - **Code interpreter**: Allows the agent to execute code snippets when needed
+3. `UseFoundryTools` resolves the connection using `AZURE_AI_PROJECT_ENDPOINT` internally
+4. A `ChatClientAgent` is created with instructions guiding it to use the MCP tools for documentation queries
+5. The agent is hosted using `RunAIAgentAsync` which exposes the OpenAI Responses-compatible API endpoint
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/agent.yaml b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/agent.yaml
new file mode 100644
index 0000000000..5d2b1f8d8d
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/agent.yaml
@@ -0,0 +1,31 @@
+name: AgentWithTools
+displayName: "Agent with Tools"
+description: >
+ An AI agent that uses Foundry tools (MCP and code interpreter) with Azure OpenAI.
+ The agent can fetch Microsoft Learn documentation and run code when needed.
+metadata:
+ authors:
+ - Microsoft Agent Framework Team
+ tags:
+ - Azure AI AgentServer
+ - Microsoft Agent Framework
+ - Tools
+ - MCP
+ - Code Interpreter
+template:
+ kind: hosted
+ name: AgentWithTools
+ protocols:
+ - protocol: responses
+ version: v1
+ environment_variables:
+ - name: AZURE_OPENAI_ENDPOINT
+ value: ${AZURE_OPENAI_ENDPOINT}
+ - name: AZURE_OPENAI_DEPLOYMENT_NAME
+ value: gpt-4o-mini
+ - name: MCP_TOOL_CONNECTION_ID
+ value: ${MCP_TOOL_CONNECTION_ID}
+resources:
+ - name: "gpt-4o-mini"
+ kind: model
+ id: gpt-4o-mini
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/run-requests.http b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/run-requests.http
new file mode 100644
index 0000000000..22a37ff54e
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTools/run-requests.http
@@ -0,0 +1,32 @@
+@host = http://localhost:8088
+@endpoint = {{host}}/responses
+
+### Health Check
+GET {{host}}/readiness
+
+### Simple string input
+POST {{endpoint}}
+Content-Type: application/json
+
+{
+ "input": "Please use the microsoft_docs_fetch tool to fetch and summarize the Microsoft Learn article at https://learn.microsoft.com/azure/ai-services/openai/overview"
+}
+
+### Explicit input
+POST {{endpoint}}
+Content-Type: application/json
+
+{
+ "input": [
+ {
+ "type": "message",
+ "role": "user",
+ "content": [
+ {
+ "type": "input_text",
+ "text": "Please use the microsoft_docs_fetch tool to fetch and summarize the Microsoft Learn article at https://learn.microsoft.com/azure/ai-services/openai/overview"
+ }
+ ]
+ }
+ ]
+}
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentsInWorkflows/README.md b/dotnet/samples/05-end-to-end/HostedAgents/AgentsInWorkflows/README.md
index 5f6babc755..72019bbf22 100644
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentsInWorkflows/README.md
+++ b/dotnet/samples/05-end-to-end/HostedAgents/AgentsInWorkflows/README.md
@@ -9,6 +9,8 @@ This workflow uses three translation agents:
The agents are connected sequentially, creating a translation chain that demonstrates how AI-powered components can be seamlessly integrated into workflow pipelines.
+> For common prerequisites and setup instructions, see the [Hosted Agent Samples README](../README.md).
+
## Prerequisites
Before you begin, ensure you have the following prerequisites:
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/README.md b/dotnet/samples/05-end-to-end/HostedAgents/README.md
new file mode 100644
index 0000000000..f7a3bdc94b
--- /dev/null
+++ b/dotnet/samples/05-end-to-end/HostedAgents/README.md
@@ -0,0 +1,125 @@
+# Hosted Agent Samples
+
+These samples demonstrate how to build and host AI agents using the [Azure AI AgentServer SDK](https://learn.microsoft.com/en-us/dotnet/api/overview/azure/ai.agentserver.agentframework-readme). Each sample can be run locally and deployed to Microsoft Foundry as a hosted agent.
+
+## Samples
+
+| Sample | Description |
+|--------|-------------|
+| [`AgentWithTools`](./AgentWithTools/) | Foundry tools (MCP + code interpreter) via `UseFoundryTools` |
+| [`AgentWithLocalTools`](./AgentWithLocalTools/) | Local C# function tool execution (Seattle hotel search) |
+| [`AgentThreadAndHITL`](./AgentThreadAndHITL/) | Human-in-the-loop with `ApprovalRequiredAIFunction` and thread persistence |
+| [`AgentWithHostedMCP`](./AgentWithHostedMCP/) | Hosted MCP server tool (Microsoft Learn search) |
+| [`AgentWithTextSearchRag`](./AgentWithTextSearchRag/) | RAG with `TextSearchProvider` (Contoso Outdoors) |
+| [`AgentsInWorkflows`](./AgentsInWorkflows/) | Sequential workflow pipeline (translation chain) |
+
+## Common Prerequisites
+
+Before running any sample, ensure you have:
+
+1. **.NET 10 SDK** or later — [Download](https://dotnet.microsoft.com/download/dotnet/10.0)
+2. **Azure CLI** installed — [Install guide](https://learn.microsoft.com/cli/azure/install-azure-cli)
+3. **Azure OpenAI** or **Azure AI Foundry project** with a chat model deployed (e.g., `gpt-4o-mini`)
+
+### Authenticate with Azure CLI
+
+All samples use `AzureCliCredential` for authentication. Make sure you're logged in:
+
+```powershell
+az login
+az account show # Verify the correct subscription
+```
+
+### Common Environment Variables
+
+Most samples require one or more of these environment variables:
+
+| Variable | Used By | Description |
+|----------|---------|-------------|
+| `AZURE_OPENAI_ENDPOINT` | Most samples | Your Azure OpenAI resource endpoint URL |
+| `AZURE_OPENAI_DEPLOYMENT_NAME` | Most samples | Chat model deployment name (defaults to `gpt-4o-mini`) |
+| `AZURE_AI_PROJECT_ENDPOINT` | AgentWithTools, AgentWithLocalTools | Azure AI Foundry project endpoint |
+| `MCP_TOOL_CONNECTION_ID` | AgentWithTools | Foundry MCP tool connection name |
+| `MODEL_DEPLOYMENT_NAME` | AgentWithLocalTools | Chat model deployment name (defaults to `gpt-4o-mini`) |
+
+See each sample's README for the specific variables required.
+
+## Azure AI Foundry Setup (for samples that use Foundry)
+
+Some samples (`AgentWithTools`, `AgentWithLocalTools`) connect to an Azure AI Foundry project. If you're using these samples, you'll need additional setup.
+
+### Azure AI Developer Role
+
+The `UseFoundryTools` extension requires the **Azure AI Developer** role on the Cognitive Services resource. Even if you created the project, you may not have this role by default.
+
+```powershell
+az role assignment create `
+ --role "Azure AI Developer" `
+ --assignee "your-email@microsoft.com" `
+ --scope "/subscriptions/{subscription-id}/resourceGroups/{resource-group}/providers/Microsoft.CognitiveServices/accounts/{account-name}"
+```
+
+> **Note**: You need **Owner** or **User Access Administrator** permissions on the resource to assign roles. If you don't have this, you may need to request JIT (Just-In-Time) elevated access via [Azure PIM](https://portal.azure.com/#view/Microsoft_Azure_PIMCommon/ActivationMenuBlade/~/aadmigratedresource).
+
+For more details on permissions, see [Azure AI Foundry Permissions](https://aka.ms/FoundryPermissions).
+
+### Creating an MCP Tool Connection
+
+The `AgentWithTools` sample requires an MCP tool connection configured in your Foundry project:
+
+1. Go to the [Azure AI Foundry portal](https://ai.azure.com)
+2. Navigate to your project
+3. Go to **Connected resources** → **+ New connection** → **Model Context Protocol tool**
+4. Fill in:
+ - **Name**: `SampleMCPTool` (or any name you prefer)
+ - **Remote MCP Server endpoint**: `https://learn.microsoft.com/api/mcp`
+ - **Authentication**: `Unauthenticated`
+5. Click **Connect**
+
+The connection **name** (e.g., `SampleMCPTool`) is used as the `MCP_TOOL_CONNECTION_ID` environment variable.
+
+> **Important**: Use only the connection **name**, not the full ARM resource ID.
+
+## Running a Sample
+
+Each sample runs as a standalone hosted agent on `http://localhost:8088/`:
+
+```powershell
+cd AgentWithTools   # or any other sample folder listed above
+dotnet run
+```
+
+### Interacting with the Agent
+
+Each sample includes a `run-requests.http` file for testing with the [VS Code REST Client](https://marketplace.visualstudio.com/items?itemName=humao.rest-client) extension, or you can use PowerShell:
+
+```powershell
+$body = @{ input = "Your question here" } | ConvertTo-Json
+Invoke-RestMethod -Uri "http://localhost:8088/responses" -Method Post -Body $body -ContentType "application/json"
+```
+
+## Deploying to Microsoft Foundry
+
+Each sample includes a `Dockerfile` and `agent.yaml` for deployment. To deploy your agent to Microsoft Foundry, follow the [hosted agents deployment guide](https://learn.microsoft.com/en-us/azure/ai-foundry/agents/concepts/hosted-agents).
+
+## Troubleshooting
+
+### `PermissionDenied` — lacks `agents/write` data action
+
+Assign the **Azure AI Developer** role to your user. See [Azure AI Developer Role](#azure-ai-developer-role) above.
+
+### `Project connection ... was not found`
+
+Make sure `MCP_TOOL_CONNECTION_ID` contains only the connection **name** (e.g., `SampleMCPTool`), not the full ARM resource ID path.
+
+### `AZURE_AI_PROJECT_ENDPOINT must be set`
+
+The `UseFoundryTools` extension requires `AZURE_AI_PROJECT_ENDPOINT`. Set it to your Foundry project endpoint (e.g., `https://your-resource.services.ai.azure.com/api/projects/your-project`).
+
+### Multi-framework error when running `dotnet run`
+
+If you see "Your project targets multiple frameworks", specify the framework:
+
+```powershell
+dotnet run --framework net10.0
+```