diff --git a/.gitignore b/.gitignore
index 877974080..6c69882a2 100644
--- a/.gitignore
+++ b/.gitignore
@@ -14,3 +14,5 @@ dist
.envrc
codegen.log
Brewfile.lock.json
+
+.codeartifact-pip-conf
\ No newline at end of file
diff --git a/.python-version b/.python-version
index 43077b246..e4fba2183 100644
--- a/.python-version
+++ b/.python-version
@@ -1 +1 @@
-3.9.18
+3.12
diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index 55f722d91..ba6c34836 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
{
- ".": "0.0.1-alpha.1"
+ ".": "0.1.0-alpha.1"
}
\ No newline at end of file
diff --git a/.stats.yml b/.stats.yml
index f89209bb1..2aac91305 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,4 +1,4 @@
-configured_endpoints: 36
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/sgp%2Fagentex-sdk-021b55c88964b7a5bfc9d692d32a52c6b0150445656d2407c4cb8e9dd1e5f100.yml
-openapi_spec_hash: ed92c0d5d6bed9cb5617f8a776ac42c9
-config_hash: ea7ccb4f8ed1981b364cef82aa595243
+configured_endpoints: 34
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/sgp%2Fagentex-sdk-1d08fb2290b5310c91801d7575d356628d372fd5434e15d3b9cead48eadb893f.yml
+openapi_spec_hash: 216a0edbf4e1a3cde23329d4f385faed
+config_hash: 7661726e3cccf9f6349179841153601d
diff --git a/CHANGELOG.md b/CHANGELOG.md
index e2079d6e0..da9e4e1c1 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,13 @@
# Changelog
+## 0.1.0-alpha.1 (2025-07-22)
+
+Full Changelog: [v0.0.1-alpha.1...v0.1.0-alpha.1](https://github.com/scaleapi/agentex-python/compare/v0.0.1-alpha.1...v0.1.0-alpha.1)
+
+### Features
+
+* **api:** manual updates ([06f5fe1](https://github.com/scaleapi/agentex-python/commit/06f5fe115ace5ec4ca8149cd0afa6207b193a04c))
+
## 0.0.1-alpha.1 (2025-07-22)
Full Changelog: [v0.0.1-alpha.0...v0.0.1-alpha.1](https://github.com/scaleapi/agentex-python/compare/v0.0.1-alpha.0...v0.0.1-alpha.1)
diff --git a/README.md b/README.md
index db9c7afa2..2bdbb5925 100644
--- a/README.md
+++ b/README.md
@@ -28,16 +28,17 @@ pip install git+ssh://git@github.com/scaleapi/agentex-python.git
The full API of this library can be found in [api.md](api.md).
```python
-import os
from agentex import Agentex
client = Agentex(
- api_key=os.environ.get("AGENTEX_SDK_API_KEY"), # This is the default and can be omitted
+ # defaults to "production".
+ environment="development",
)
-response = client.echo.send(
- message="message",
+agent = client.agents.retrieve(
+ "agent_id",
)
+print(agent.id)
```
While you can provide an `api_key` keyword argument,
@@ -50,19 +51,20 @@ so that your API Key is not stored in source control.
Simply import `AsyncAgentex` instead of `Agentex` and use `await` with each API call:
```python
-import os
import asyncio
from agentex import AsyncAgentex
client = AsyncAgentex(
- api_key=os.environ.get("AGENTEX_SDK_API_KEY"), # This is the default and can be omitted
+ # defaults to "production".
+ environment="development",
)
async def main() -> None:
- response = await client.echo.send(
- message="message",
+ agent = await client.agents.retrieve(
+ "agent_id",
)
+ print(agent.id)
asyncio.run(main())
@@ -91,12 +93,12 @@ from agentex import AsyncAgentex
async def main() -> None:
async with AsyncAgentex(
- api_key="My API Key",
http_client=DefaultAioHttpClient(),
) as client:
- response = await client.echo.send(
- message="message",
+ agent = await client.agents.retrieve(
+ "agent_id",
)
+ print(agent.id)
asyncio.run(main())
@@ -127,8 +129,8 @@ from agentex import Agentex
client = Agentex()
try:
- client.echo.send(
- message="message",
+ client.agents.retrieve(
+ "agent_id",
)
except agentex.APIConnectionError as e:
print("The server could not be reached")
@@ -172,8 +174,8 @@ client = Agentex(
)
# Or, configure per-request:
-client.with_options(max_retries=5).echo.send(
- message="message",
+client.with_options(max_retries=5).agents.retrieve(
+ "agent_id",
)
```
@@ -197,8 +199,8 @@ client = Agentex(
)
# Override per-request:
-client.with_options(timeout=5.0).echo.send(
- message="message",
+client.with_options(timeout=5.0).agents.retrieve(
+ "agent_id",
)
```
@@ -240,13 +242,13 @@ The "raw" Response object can be accessed by prefixing `.with_raw_response.` to
from agentex import Agentex
client = Agentex()
-response = client.echo.with_raw_response.send(
- message="message",
+response = client.agents.with_raw_response.retrieve(
+ "agent_id",
)
print(response.headers.get('X-My-Header'))
-echo = response.parse() # get the object that `echo.send()` would have returned
-print(echo)
+agent = response.parse() # get the object that `agents.retrieve()` would have returned
+print(agent.id)
```
These methods return an [`APIResponse`](https://github.com/scaleapi/agentex-python/tree/main/src/agentex/_response.py) object.
@@ -260,8 +262,8 @@ The above interface eagerly reads the full response body when you make the reque
To stream the response body, use `.with_streaming_response` instead, which requires a context manager and only reads the response body once you call `.read()`, `.text()`, `.json()`, `.iter_bytes()`, `.iter_text()`, `.iter_lines()` or `.parse()`. In the async client, these are async methods.
```python
-with client.echo.with_streaming_response.send(
- message="message",
+with client.agents.with_streaming_response.retrieve(
+ "agent_id",
) as response:
print(response.headers.get("X-My-Header"))
diff --git a/api.md b/api.md
index 45be6a645..8d4e61562 100644
--- a/api.md
+++ b/api.md
@@ -1,37 +1,35 @@
-# Agentex
-
-Methods:
-
-- client.get_root() -> object
-
-# Echo
-
-Methods:
-
-- client.echo.send(\*\*params) -> object
-
# Agents
Types:
```python
-from agentex.types import AcpType, Agent, AgentRpcRequest, AgentListResponse
+from agentex.types import (
+ AcpType,
+ Agent,
+ AgentRpcParams,
+ AgentRpcRequest,
+ AgentRpcResponse,
+ AgentRpcResult,
+ DataDelta,
+ TaskMessageContent,
+ TaskMessageDelta,
+ TaskMessageUpdate,
+ TextDelta,
+ ToolRequestDelta,
+ ToolResponseDelta,
+ AgentListResponse,
+)
```
Methods:
-- client.agents.retrieve(agent_id) -> Agent
-- client.agents.list(\*\*params) -> AgentListResponse
-- client.agents.delete(agent_id) -> Agent
-- client.agents.rpc(agent_id, \*\*params) -> object
-
-## Name
-
-Methods:
-
-- client.agents.name.retrieve(agent_name) -> Agent
-- client.agents.name.delete(agent_name) -> Agent
-- client.agents.name.rpc(agent_name, \*\*params) -> object
+- client.agents.retrieve(agent_id) -> Agent
+- client.agents.list(\*\*params) -> AgentListResponse
+- client.agents.delete(agent_id) -> Agent
+- client.agents.delete_by_name(agent_name) -> Agent
+- client.agents.retrieve_by_name(agent_name) -> Agent
+- client.agents.rpc(agent_id, \*\*params) -> AgentRpcResponse
+- client.agents.rpc_by_name(agent_name, \*\*params) -> AgentRpcResponse
# Tasks
@@ -43,18 +41,13 @@ from agentex.types import Task, TaskListResponse
Methods:
-- client.tasks.retrieve(task_id) -> Task
-- client.tasks.list() -> TaskListResponse
-- client.tasks.delete(task_id) -> Task
-- client.tasks.stream_events(task_id) -> object
-
-## Name
-
-Methods:
-
-- client.tasks.name.retrieve(task_name) -> Task
-- client.tasks.name.delete(task_name) -> Task
-- client.tasks.name.stream_events(task_name) -> object
+- client.tasks.retrieve(task_id) -> Task
+- client.tasks.list() -> TaskListResponse
+- client.tasks.delete(task_id) -> Task
+- client.tasks.delete_by_name(task_name) -> Task
+- client.tasks.retrieve_by_name(task_name) -> Task
+- client.tasks.stream_events(task_id) -> object
+- client.tasks.stream_events_by_name(task_name) -> object
# Messages
@@ -65,7 +58,6 @@ from agentex.types import (
DataContent,
MessageAuthor,
MessageStyle,
- StreamingStatus,
TaskMessage,
TextContent,
ToolRequestContent,
diff --git a/examples/tutorials/00_sync/000_hello_acp/.dockerignore b/examples/tutorials/00_sync/000_hello_acp/.dockerignore
new file mode 100644
index 000000000..4521d8465
--- /dev/null
+++ b/examples/tutorials/00_sync/000_hello_acp/.dockerignore
@@ -0,0 +1,46 @@
+# Python
+__pycache__/
+*.py[cod]
+*$py.class
+*.so
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# Environments
+.env**
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# IDE
+.idea/
+.vscode/
+*.swp
+*.swo
+
+# Git
+.git
+.gitignore
+
+# Misc
+.DS_Store
+
+# CodeArtifact configuration
+.codeartifact-pip-conf
\ No newline at end of file
diff --git a/examples/tutorials/00_sync/000_hello_acp/Dockerfile b/examples/tutorials/00_sync/000_hello_acp/Dockerfile
new file mode 100644
index 000000000..34f07ab19
--- /dev/null
+++ b/examples/tutorials/00_sync/000_hello_acp/Dockerfile
@@ -0,0 +1,45 @@
+# syntax=docker/dockerfile:1.3
+FROM python:3.12-slim
+COPY --from=ghcr.io/astral-sh/uv:0.6.4 /uv /uvx /bin/
+
+# Install system dependencies
+RUN apt-get update && apt-get install -y \
+ htop \
+ vim \
+ curl \
+ tar \
+ python3-dev \
+ postgresql-client \
+ build-essential \
+ libpq-dev \
+ gcc \
+ cmake \
+ netcat-openbsd \
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
+
+RUN uv pip install --system --upgrade pip setuptools wheel
+
+ENV UV_HTTP_TIMEOUT=1000
+
+# Copy just the requirements file to optimize caching
+COPY 000_hello_acp/requirements.txt /app/requirements.txt
+
+WORKDIR /app/
+
+# Install the required Python packages
+RUN --mount=type=secret,id=codeartifact-pip-conf,target=/etc/pip.conf \
+ export UV_INDEX_URL=$(grep -E "^index-url" /etc/pip.conf | cut -d'=' -f2- | xargs) && \
+ export UV_EXTRA_INDEX_URL=$(grep -E "^extra-index-url" /etc/pip.conf | cut -d'=' -f2- | xargs || echo "") && \
+ uv pip install --system -r requirements.txt
+
+# Copy the project code
+COPY 000_hello_acp/project /app/project
+
+WORKDIR /app/project
+
+# Set environment variables
+ENV PYTHONPATH=/app
+
+# Run the agent using uvicorn
+CMD ["uvicorn", "acp:acp", "--host", "0.0.0.0", "--port", "8000"]
\ No newline at end of file
diff --git a/examples/tutorials/00_sync/000_hello_acp/README.md b/examples/tutorials/00_sync/000_hello_acp/README.md
new file mode 100644
index 000000000..87631a8a1
--- /dev/null
+++ b/examples/tutorials/00_sync/000_hello_acp/README.md
@@ -0,0 +1,128 @@
+# s000-hello-acp - AgentEx Starter Template
+
+This is a generic starter template for building agents with the AgentEx framework. It provides a basic implementation of the Agent 2 Client Protocol (ACP) to help you get started quickly.
+
+## What You'll Learn
+
+- **Tasks**: A task is a grouping mechanism for related messages. Think of it as a conversation thread or a session.
+- **Messages**: Messages are communication objects within a task. They can contain text, data, or instructions.
+- **ACP Events**: The agent responds to four main events:
+ - `task_received`: When a new task is created
+ - `task_message_received`: When a message is sent within a task
+ - `task_approved`: When a task is approved
+ - `task_canceled`: When a task is canceled
+
+## Running the Agent
+
+1. Run the agent locally:
+```bash
+agentex agents run --manifest manifest.yaml
+```
+
+The agent will start on port 8000 and print messages whenever it receives any of the ACP events.
+
+## What's Inside
+
+This template:
+- Sets up a basic ACP server
+- Handles each of the required ACP events with simple print statements
+- Provides a foundation for building more complex agents
+
+## Next Steps
+
+For more advanced agent development, check out the AgentEx tutorials:
+
+- **Tutorials 00-08**: Learn about building synchronous agents with ACP
+- **Tutorials 09-10**: Learn how to use Temporal to power asynchronous agents
+ - Tutorial 09: Basic Temporal workflow setup
+ - Tutorial 10: Advanced Temporal patterns and best practices
+
+These tutorials will help you understand:
+- How to handle long-running tasks
+- Implementing state machines
+- Managing complex workflows
+- Best practices for async agent development
+
+## The Manifest File
+
+The `manifest.yaml` file is your agent's configuration file. It defines:
+- How your agent should be built and packaged
+- What files are included in your agent's Docker image
+- Your agent's name and description
+- Local development settings (like the port your agent runs on)
+
+This file is essential for both local development and deployment of your agent.
+
+## Project Structure
+
+```
+000_hello_acp/
+├── project/ # Your agent's code
+│ ├── __init__.py
+│ └── acp.py # ACP server and event handlers
+├── Dockerfile # Container definition
+├── manifest.yaml # Deployment config
+└── requirements.txt # Dependencies
+```
+
+## Development
+
+1. **Customize Event Handlers**
+ - Modify the handlers in `acp.py` to implement your agent's logic
+ - Add your own tools and capabilities
+ - Implement custom state management
+
+2. **Add Dependencies**
+ - Add required packages to `requirements.txt`
+ - Update the manifest with any needed credentials
+
+## Local Development
+
+1. **Install AgentEx**
+```bash
+cd agentex-py
+uv venv
+source .venv/bin/activate
+uv sync
+```
+
+2. **Deploy your agent**
+```bash
+# From this directory
+export ENVIRONMENT=development && agentex agents create --manifest manifest.yaml
+```
+
+3. **Interact with your agent**
+
+Option 0: CLI (deprecated - to be replaced once a new CLI is implemented - please use the web UI for now!)
+```bash
+# Submit a task via CLI
+agentex tasks submit --agent {{ agent_name }} --task "Your task here"
+```
+
+Option 1: Web UI
+```bash
+# Start the local web interface
+cd agentex-web
+make dev
+
+# Then open http://localhost:3000 in your browser to chat with your agent
+```
+
+## Development Tips
+
+1. **Local Testing**
+- Set environment variables in project/.env for any required credentials, or set them in the manifest.yaml
+
+### To build the agent Docker image locally (normally not necessary):
+
+1. First, set up CodeArtifact authentication:
+```bash
+../../setup-build-codeartifact.sh
+```
+
+2. Build the agent image:
+```bash
+agentex agents build --manifest manifest.yaml --secret 'id=codeartifact-pip-conf,src=.codeartifact-pip-conf'
+```
+
diff --git a/examples/tutorials/00_sync/000_hello_acp/manifest.yaml b/examples/tutorials/00_sync/000_hello_acp/manifest.yaml
new file mode 100644
index 000000000..72398ea07
--- /dev/null
+++ b/examples/tutorials/00_sync/000_hello_acp/manifest.yaml
@@ -0,0 +1,122 @@
+# Agent Manifest Configuration
+# ---------------------------
+# This file defines how your agent should be built and deployed.
+
+# Build Configuration
+# ------------------
+# The build config defines what gets packaged into your agent's Docker image.
+# This same configuration is used whether building locally or remotely.
+#
+# When building:
+# 1. All files from include_paths are collected into a build context
+# 2. The context is filtered by dockerignore rules
+# 3. The Dockerfile uses this context to build your agent's image
+# 4. The image is pushed to a registry and used to run your agent
+build:
+ context:
+ # Root directory for the build context
+ root: ../ # Keep this as the default root
+
+ # Paths to include in the Docker build context
+ # Must include:
+ # - agentex-py: The core framework (needed for agent execution)
+ # - Your agent's directory (your custom agent code)
+ # These paths are collected and sent to the Docker daemon for building
+ include_paths:
+ - 000_hello_acp
+
+ # Path to your agent's Dockerfile
+ # This defines how your agent's image is built from the context
+ # Relative to the root directory
+ dockerfile: 000_hello_acp/Dockerfile
+
+ # Path to your agent's .dockerignore
+ # Filters unnecessary files from the build context
+ # Helps keep build context small and builds fast
+ dockerignore: 000_hello_acp/.dockerignore
+
+
+# Local Development Configuration
+# -----------------------------
+# Only used when running the agent locally
+local_development:
+ agent:
+ port: 8000 # Port where your local ACP server is running
+ host_address: host.docker.internal # Host address for Docker networking (host.docker.internal for Docker, localhost for direct)
+
+ # File paths for local development (relative to this manifest.yaml)
+ paths:
+ # Path to ACP server file
+ # Examples:
+ # project/acp.py (standard)
+ # src/server.py (custom structure)
+ # ../shared/acp.py (shared across projects)
+ # /absolute/path/acp.py (absolute path)
+ acp: project/acp.py
+
+
+# Agent Configuration
+# -----------------
+agent:
+ # Unique name for your agent
+ # Used for task routing and monitoring
+ name: s000-hello-acp
+
+ # Type of ACP to use
+ # sync: Simple synchronous ACP implementation
+ # agentic: Advanced ACP with sub-types "base" or "temporal" (requires config)
+ acp_type: sync
+
+ # Description of what your agent does
+ # Helps with documentation and discovery
+ description: An AgentEx agent that just says hello and acknowledges the user's message
+
+ # Temporal workflow configuration
+ # Set enabled: true to use Temporal workflows for long-running tasks
+ temporal:
+ enabled: false
+
+ # Optional: Credentials mapping
+ # Maps Kubernetes secrets to environment variables
+ # Common credentials include:
+ # credentials:
+ # - env_var_name: OPENAI_API_KEY
+ # secret_name: openai-api-key
+ # secret_key: api-key
+
+ # Optional: Set Environment variables for running your agent locally as well
+ # as for deployment later on
+ # env:
+ # - name: OPENAI_BASE_URL
+ # value: "https://api.openai.com/v1"
+ # - name: ACCOUNT_ID
+ # value: "your_account_id_here"
+
+
+# Deployment Configuration
+# -----------------------
+# Configuration for deploying your agent to Kubernetes clusters
+deployment:
+ # Container image configuration
+ image:
+ repository: "" # Update with your container registry
+ tag: "latest" # Default tag, should be versioned in production
+
+ # Global deployment settings that apply to all clusters
+ # These can be overridden in cluster-specific files (deploy/*.yaml)
+ global:
+ agent:
+ name: "s000-hello-acp"
+ description: "An AgentEx agent that just says hello and acknowledges the user's message"
+
+ # Default replica count
+ replicaCount: 1
+
+ # Default resource requirements
+ resources:
+ requests:
+ cpu: "500m"
+ memory: "1Gi"
+ limits:
+ cpu: "1000m"
+ memory: "2Gi"
\ No newline at end of file
diff --git a/examples/tutorials/00_sync/000_hello_acp/project/__init__.py b/examples/tutorials/00_sync/000_hello_acp/project/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/examples/tutorials/00_sync/000_hello_acp/project/acp.py b/examples/tutorials/00_sync/000_hello_acp/project/acp.py
new file mode 100644
index 000000000..0c7b66450
--- /dev/null
+++ b/examples/tutorials/00_sync/000_hello_acp/project/acp.py
@@ -0,0 +1,28 @@
+from typing import AsyncGenerator, Union
+from agentex.lib.sdk.fastacp.fastacp import FastACP
+from agentex.lib.types.acp import SendMessageParams
+
+from agentex.lib.types.task_message_updates import TaskMessageUpdate
+from agentex.types.task_message import TaskMessageContent
+from agentex.types.task_message_content import TextContent
+from agentex.lib.utils.logging import make_logger
+
+logger = make_logger(__name__)
+
+
+# Create an ACP server
+acp = FastACP.create(
+ acp_type="sync",
+)
+
+
+@acp.on_message_send
+async def handle_message_send(
+ params: SendMessageParams
+) -> Union[TaskMessageContent, AsyncGenerator[TaskMessageUpdate, None]]:
+ """Default message handler with streaming support"""
+ return TextContent(
+ author="agent",
+ content=f"Hello! I've received your message. Here's a generic response, but in future tutorials we'll see how you can get me to intelligently respond to your message. This is what I heard you say: {params.content.content}",
+ )
+
diff --git a/examples/tutorials/00_sync/000_hello_acp/requirements.txt b/examples/tutorials/00_sync/000_hello_acp/requirements.txt
new file mode 100644
index 000000000..2c2ebf15b
--- /dev/null
+++ b/examples/tutorials/00_sync/000_hello_acp/requirements.txt
@@ -0,0 +1,5 @@
+# AgentEx Python SDK (installed from the configured package index)
+agentex-py>=0.0.4,<0.1.0
+
+# Scale GenAI Platform Python SDK
+scale-gp
\ No newline at end of file
diff --git a/examples/tutorials/00_sync/010_multiturn/.dockerignore b/examples/tutorials/00_sync/010_multiturn/.dockerignore
new file mode 100644
index 000000000..4521d8465
--- /dev/null
+++ b/examples/tutorials/00_sync/010_multiturn/.dockerignore
@@ -0,0 +1,46 @@
+# Python
+__pycache__/
+*.py[cod]
+*$py.class
+*.so
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# Environments
+.env**
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# IDE
+.idea/
+.vscode/
+*.swp
+*.swo
+
+# Git
+.git
+.gitignore
+
+# Misc
+.DS_Store
+
+# CodeArtifact configuration
+.codeartifact-pip-conf
\ No newline at end of file
diff --git a/examples/tutorials/00_sync/010_multiturn/Dockerfile b/examples/tutorials/00_sync/010_multiturn/Dockerfile
new file mode 100644
index 000000000..29f524911
--- /dev/null
+++ b/examples/tutorials/00_sync/010_multiturn/Dockerfile
@@ -0,0 +1,45 @@
+# syntax=docker/dockerfile:1.3
+FROM python:3.12-slim
+COPY --from=ghcr.io/astral-sh/uv:0.6.4 /uv /uvx /bin/
+
+# Install system dependencies
+RUN apt-get update && apt-get install -y \
+ htop \
+ vim \
+ curl \
+ tar \
+ python3-dev \
+ postgresql-client \
+ build-essential \
+ libpq-dev \
+ gcc \
+ cmake \
+ netcat-openbsd \
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
+
+RUN uv pip install --system --upgrade pip setuptools wheel
+
+ENV UV_HTTP_TIMEOUT=1000
+
+# Copy just the requirements file to optimize caching
+COPY 010_multiturn/requirements.txt /app/requirements.txt
+
+WORKDIR /app/
+
+# Install the required Python packages
+RUN --mount=type=secret,id=codeartifact-pip-conf,target=/etc/pip.conf \
+ export UV_INDEX_URL=$(grep -E "^index-url" /etc/pip.conf | cut -d'=' -f2- | xargs) && \
+ export UV_EXTRA_INDEX_URL=$(grep -E "^extra-index-url" /etc/pip.conf | cut -d'=' -f2- | xargs || echo "") && \
+ uv pip install --system -r requirements.txt
+
+# Copy the project code
+COPY 010_multiturn/project /app/project
+
+WORKDIR /app/project
+
+# Set environment variables
+ENV PYTHONPATH=/app
+
+# Run the agent using uvicorn
+CMD ["uvicorn", "acp:acp", "--host", "0.0.0.0", "--port", "8000"]
\ No newline at end of file
diff --git a/examples/tutorials/00_sync/010_multiturn/README.md b/examples/tutorials/00_sync/010_multiturn/README.md
new file mode 100644
index 000000000..a23f76531
--- /dev/null
+++ b/examples/tutorials/00_sync/010_multiturn/README.md
@@ -0,0 +1,127 @@
+# s010-multiturn - AgentEx Starter Template
+
+This is a generic starter template for building agents with the AgentEx framework. It provides a basic implementation of the Agent 2 Client Protocol (ACP) to help you get started quickly.
+
+## What You'll Learn
+
+- **Tasks**: A task is a grouping mechanism for related messages. Think of it as a conversation thread or a session.
+- **Messages**: Messages are communication objects within a task. They can contain text, data, or instructions.
+- **ACP Events**: The agent responds to four main events:
+ - `task_received`: When a new task is created
+ - `task_message_received`: When a message is sent within a task
+ - `task_approved`: When a task is approved
+ - `task_canceled`: When a task is canceled
+
+## Running the Agent
+
+1. Run the agent locally:
+```bash
+agentex agents run --manifest manifest.yaml
+```
+
+The agent will start on port 8000 and print messages whenever it receives any of the ACP events.
+
+## What's Inside
+
+This template:
+- Sets up a basic ACP server
+- Handles each of the required ACP events with simple print statements
+- Provides a foundation for building more complex agents
+
+## Next Steps
+
+For more advanced agent development, check out the AgentEx tutorials:
+
+- **Tutorials 00-08**: Learn about building synchronous agents with ACP
+- **Tutorials 09-10**: Learn how to use Temporal to power asynchronous agents
+ - Tutorial 09: Basic Temporal workflow setup
+ - Tutorial 10: Advanced Temporal patterns and best practices
+
+These tutorials will help you understand:
+- How to handle long-running tasks
+- Implementing state machines
+- Managing complex workflows
+- Best practices for async agent development
+
+## The Manifest File
+
+The `manifest.yaml` file is your agent's configuration file. It defines:
+- How your agent should be built and packaged
+- What files are included in your agent's Docker image
+- Your agent's name and description
+- Local development settings (like the port your agent runs on)
+
+This file is essential for both local development and deployment of your agent.
+
+## Project Structure
+
+```
+010_multiturn/
+├── project/ # Your agent's code
+│ ├── __init__.py
+│ └── acp.py # ACP server and event handlers
+├── Dockerfile # Container definition
+├── manifest.yaml # Deployment config
+└── requirements.txt # Dependencies
+```
+
+## Development
+
+1. **Customize Event Handlers**
+ - Modify the handlers in `acp.py` to implement your agent's logic
+ - Add your own tools and capabilities
+ - Implement custom state management
+
+2. **Add Dependencies**
+ - Add required packages to `requirements.txt`
+ - Update the manifest with any needed credentials
+
+## Local Development
+
+1. **Install AgentEx**
+```bash
+cd agentex-py
+uv venv
+source .venv/bin/activate
+uv sync
+```
+
+2. **Deploy your agent**
+```bash
+# From this directory
+export ENVIRONMENT=development && agentex agents create --manifest manifest.yaml
+```
+
+3. **Interact with your agent**
+
+Option 0: CLI (deprecated - to be replaced once a new CLI is implemented - please use the web UI for now!)
+```bash
+# Submit a task via CLI
+agentex tasks submit --agent {{ agent_name }} --task "Your task here"
+```
+
+Option 1: Web UI
+```bash
+# Start the local web interface
+cd agentex-web
+make dev
+
+# Then open http://localhost:3000 in your browser to chat with your agent
+```
+
+## Development Tips
+
+1. **Local Testing**
+- Set environment variables in project/.env for any required credentials, or set them in the manifest.yaml
+
+### To build the agent Docker image locally (normally not necessary):
+
+1. First, set up CodeArtifact authentication:
+```bash
+../../setup-build-codeartifact.sh
+```
+
+2. Build the agent image:
+```bash
+agentex agents build --manifest manifest.yaml --secret 'id=codeartifact-pip-conf,src=.codeartifact-pip-conf'
+```
\ No newline at end of file
diff --git a/examples/tutorials/00_sync/010_multiturn/manifest.yaml b/examples/tutorials/00_sync/010_multiturn/manifest.yaml
new file mode 100644
index 000000000..a68ab6bed
--- /dev/null
+++ b/examples/tutorials/00_sync/010_multiturn/manifest.yaml
@@ -0,0 +1,118 @@
+# Agent Manifest Configuration
+# ---------------------------
+# This file defines how your agent should be built and deployed.
+
+# Build Configuration
+# ------------------
+# The build config defines what gets packaged into your agent's Docker image.
+# This same configuration is used whether building locally or remotely.
+#
+# When building:
+# 1. All files from include_paths are collected into a build context
+# 2. The context is filtered by dockerignore rules
+# 3. The Dockerfile uses this context to build your agent's image
+# 4. The image is pushed to a registry and used to run your agent
+build:
+ context:
+ # Root directory for the build context
+ root: ../ # Keep this as the default root
+
+ # Paths to include in the Docker build context
+ # Must include:
+ # - agentex-py: The core framework (needed for agent execution)
+ # - Your agent's directory (your custom agent code)
+ # These paths are collected and sent to the Docker daemon for building
+ include_paths:
+ - 010_multiturn
+
+ # Path to your agent's Dockerfile
+ # This defines how your agent's image is built from the context
+ # Relative to the root directory
+ dockerfile: 010_multiturn/Dockerfile
+
+ # Path to your agent's .dockerignore
+ # Filters unnecessary files from the build context
+ # Helps keep build context small and builds fast
+ dockerignore: 010_multiturn/.dockerignore
+
+
+# Local Development Configuration
+# -----------------------------
+# Only used when running the agent locally
+local_development:
+ agent:
+ port: 8000 # Port where your local ACP server is running
+ host_address: host.docker.internal # Host address for Docker networking (host.docker.internal for Docker, localhost for direct)
+
+ # File paths for local development (relative to this manifest.yaml)
+ paths:
+ # Path to ACP server file
+ # Examples:
+ # project/acp.py (standard)
+ # src/server.py (custom structure)
+ # ../shared/acp.py (shared across projects)
+ # /absolute/path/acp.py (absolute path)
+ acp: project/acp.py
+
+
+# Agent Configuration
+# -----------------
+agent:
+ acp_type: sync
+ # Unique name for your agent
+ # Used for task routing and monitoring
+ name: s010-multiturn
+
+ # Description of what your agent does
+ # Helps with documentation and discovery
+ description: An AgentEx agent
+
+ # Temporal workflow configuration
+ # Set enabled: true to use Temporal workflows for long-running tasks
+ temporal:
+ enabled: false
+
+ # Optional: Credentials mapping
+ # Maps Kubernetes secrets to environment variables
+ # Common credentials include:
+ # credentials:
+ # - env_var_name: OPENAI_API_KEY
+ # secret_name: openai-api-key
+ # secret_key: api-key
+
+ # Optional: Set Environment variables for running your agent locally as well
+ # as for deployment later on
+ # env:
+ # - name: OPENAI_BASE_URL
+ # value: "https://api.openai.com/v1"
+ # - name: ACCOUNT_ID
+ # value: "your_account_id_here"
+
+
+# Deployment Configuration
+# -----------------------
+# Configuration for deploying your agent to Kubernetes clusters
+deployment:
+ # Container image configuration
+ image:
+ repository: "" # Update with your container registry
+ tag: "latest" # Default tag, should be versioned in production
+
+ # Global deployment settings that apply to all clusters
+ # These can be overridden in cluster-specific files (deploy/*.yaml)
+ global:
+ agent:
+ name: "s010-multiturn"
+ description: "An AgentEx agent"
+
+ # Default replica count
+ replicaCount: 1
+
+ # Default resource requirements
+ resources:
+ requests:
+ cpu: "500m"
+ memory: "1Gi"
+ limits:
+ cpu: "1000m"
+ memory: "2Gi"
\ No newline at end of file
diff --git a/examples/tutorials/00_sync/010_multiturn/project/__init__.py b/examples/tutorials/00_sync/010_multiturn/project/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/examples/tutorials/00_sync/010_multiturn/project/acp.py b/examples/tutorials/00_sync/010_multiturn/project/acp.py
new file mode 100644
index 000000000..0067cec30
--- /dev/null
+++ b/examples/tutorials/00_sync/010_multiturn/project/acp.py
@@ -0,0 +1,119 @@
+import os
+from typing import AsyncGenerator, Union
+
+from agentex.lib import adk
+from agentex.lib.sdk.fastacp.fastacp import FastACP
+from agentex.lib.types.acp import SendMessageParams
+from agentex.lib.types.llm_messages import AssistantMessage, LLMConfig, SystemMessage, UserMessage
+from agentex.lib.types.task_message_updates import TaskMessageUpdate
+from agentex.types.task_message import TaskMessageContent
+from agentex.types.task_message_content import TextContent
+from agentex.lib.utils.model_utils import BaseModel
+
+# Create an ACP server
+acp = FastACP.create(
+ acp_type="sync",
+)
+
+
+class StateModel(BaseModel):
+ system_prompt: str
+ model: str
+
+
+# Note: The return of this handler is required to be persisted by the Agentex Server
+@acp.on_message_send
+async def handle_message_send(
+ params: SendMessageParams
+) -> Union[TaskMessageContent, AsyncGenerator[TaskMessageUpdate, None]]:
+ """
+ In this tutorial, we'll see how to handle a basic multi-turn conversation without streaming.
+ """
+ #########################################################
+ # 0. Validate the message.
+ #########################################################
+
+ if params.content.type != "text":
+ raise ValueError(f"Expected text message, got {params.content.type}")
+
+ if params.content.author != "user":
+ raise ValueError(f"Expected user message, got {params.content.author}")
+
+ if not os.environ.get("OPENAI_API_KEY"):
+ return TextContent(
+ author="agent",
+ content="Hey, sorry I'm unable to respond to your message because you're running this example without an OpenAI API key. Please set the OPENAI_API_KEY environment variable to run this example. Do this by either by adding a .env file to the project/ directory or by setting the environment variable in your terminal.",
+ )
+
+ #########################################################
+ # 1. Initialize the state. Using state is optional, but it's a good way to store information between turns.
+ #########################################################
+
+ # Try to retrieve the state. If it doesn't exist, create it.
+ task_state = await adk.state.get_by_task_and_agent(task_id=params.task.id, agent_id=params.agent.id)
+
+ if not task_state:
+ # If the state doesn't exist, create it.
+ state = StateModel(system_prompt="You are a helpful assistant that can answer questions.", model="gpt-4o-mini")
+ task_state = await adk.state.create(task_id=params.task.id, agent_id=params.agent.id, state=state)
+ else:
+ state = StateModel.model_validate(task_state.state)
+
+ #########################################################
+ # 2. Fetch our message history.
+ #########################################################
+
+ task_messages = await adk.messages.list(task_id=params.task.id)
+
+ #########################################################
+ # 3. Convert task messages to LLM messages.
+ #########################################################
+
+ # This might seem duplicative, but the split between TaskMessage and LLMMessage is intentional and important.
+
+ llm_messages = [
+ SystemMessage(content=state.system_prompt),
+ *[
+ UserMessage(content=message.content.content) if message.content.author == "user" else AssistantMessage(content=message.content.content)
+ for message in task_messages
+ if message.content.type == "text"
+ ]
+ ]
+
+ # TaskMessages are messages that are sent between an Agent and a Client. They are fundamentally decoupled from messages sent to the LLM. This is because you may want to send additional metadata to allow the client to render the message on the UI differently.
+
+ # LLMMessages are OpenAI-compatible messages that are sent to the LLM, and are used to track the state of a conversation with a model.
+
+ # In simple scenarios your conversion logic will just look like this. However, in complex scenarios where you are leveraging the flexibility of the TaskMessage type to send non-LLM-specific metadata, you should write custom conversion logic.
+
+ # Some complex scenarios include:
+ # - Taking a markdown document output by an LLM, postprocessing it into a JSON object to clearly denote title, content, and footers. This can be sent as a DataContent TaskMessage to the client and converted back to markdown here to send back to the LLM.
+# - If using multiple LLMs (like in an actor-critic framework), you may want to send DataContent that denotes which LLM generated which part of the output and write conversion logic to split the TaskMessage history into multiple LLM conversations.
+# - If using multiple LLMs, but one LLM's output should not be sent to the user (i.e. a critic model), you can leverage the State as an internal storage mechanism to store the critic model's conversation history. This is a powerful and flexible way to handle complex scenarios.
+
+ #########################################################
+ # 4. Call an LLM to respond to the user's message.
+ #########################################################
+
+ # Call an LLM to respond to the user's message
+ chat_completion = await adk.providers.litellm.chat_completion(
+ llm_config=LLMConfig(model=state.model, messages=llm_messages),
+ trace_id=params.task.id,
+ )
+
+ #########################################################
+ # 5. Return the agent response to the client.
+ #########################################################
+
+ # The Agentex server automatically commits input and output messages to the database so you don't need to do this yourself, simply process the input content and return the output content.
+
+ # Return the agent response to the client
+ if chat_completion.choices[0].message:
+ content_str = chat_completion.choices[0].message.content or ""
+ else:
+ content_str = ""
+
+ return TextContent(
+ author="agent",
+ content=content_str
+ )
diff --git a/examples/tutorials/00_sync/010_multiturn/requirements.txt b/examples/tutorials/00_sync/010_multiturn/requirements.txt
new file mode 100644
index 000000000..d12ad3951
--- /dev/null
+++ b/examples/tutorials/00_sync/010_multiturn/requirements.txt
@@ -0,0 +1,4 @@
+agentex-py>=0.0.4,<0.1.0
+
+# Scale GenAI Platform Python SDK
+scale-gp
\ No newline at end of file
diff --git a/examples/tutorials/00_sync/020_streaming/.dockerignore b/examples/tutorials/00_sync/020_streaming/.dockerignore
new file mode 100644
index 000000000..e2a6524b2
--- /dev/null
+++ b/examples/tutorials/00_sync/020_streaming/.dockerignore
@@ -0,0 +1,47 @@
+# Python
+__pycache__/
+*.py[cod]
+*$py.class
+*.so
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# Environments
+.env**
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# IDE
+.idea/
+.vscode/
+*.swp
+*.swo
+
+# Git
+.git
+.gitignore
+
+# Misc
+.DS_Store
+
+# CodeArtifact configuration
+.codeartifact-pip-conf
+
diff --git a/examples/tutorials/00_sync/020_streaming/Dockerfile b/examples/tutorials/00_sync/020_streaming/Dockerfile
new file mode 100644
index 000000000..99958ebc5
--- /dev/null
+++ b/examples/tutorials/00_sync/020_streaming/Dockerfile
@@ -0,0 +1,44 @@
+# syntax=docker/dockerfile:1.3
+FROM python:3.12-slim
+COPY --from=ghcr.io/astral-sh/uv:0.6.4 /uv /uvx /bin/
+
+# Install system dependencies
+RUN apt-get update && apt-get install -y \
+ htop \
+ vim \
+ curl \
+ tar \
+ python3-dev \
+ postgresql-client \
+ build-essential \
+ libpq-dev \
+ gcc \
+ cmake \
+ netcat-openbsd \
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
+
+RUN uv pip install --system --upgrade pip setuptools wheel
+
+ENV UV_HTTP_TIMEOUT=1000
+
+# Copy just the requirements file to optimize caching
+COPY 020_streaming/requirements.txt /app/requirements.txt
+
+WORKDIR /app/
+
+# Install the required Python packages
+RUN --mount=type=secret,id=codeartifact-pip-conf,target=/etc/pip.conf \
+ export UV_INDEX_URL=$(grep -E "^index-url" /etc/pip.conf | cut -d'=' -f2- | xargs) && \
+ export UV_EXTRA_INDEX_URL=$(grep -E "^extra-index-url" /etc/pip.conf | cut -d'=' -f2- | xargs || echo "") && \
+ uv pip install --system -r requirements.txt
+# Copy the project code
+COPY 020_streaming/project /app/project
+
+WORKDIR /app/project
+
+# Set environment variables
+ENV PYTHONPATH=/app
+
+# Run the agent using uvicorn
+CMD ["uvicorn", "acp:acp", "--host", "0.0.0.0", "--port", "8000"]
\ No newline at end of file
diff --git a/examples/tutorials/00_sync/020_streaming/README.md b/examples/tutorials/00_sync/020_streaming/README.md
new file mode 100644
index 000000000..920acf28b
--- /dev/null
+++ b/examples/tutorials/00_sync/020_streaming/README.md
@@ -0,0 +1,129 @@
+# s020-streaming - AgentEx Starter Template
+
+This is a generic starter template for building agents with the AgentEx framework. It provides a basic implementation of the Agent 2 Client Protocol (ACP) to help you get started quickly.
+
+## What You'll Learn
+
+- **Tasks**: A task is a grouping mechanism for related messages. Think of it as a conversation thread or a session.
+- **Messages**: Messages are communication objects within a task. They can contain text, data, or instructions.
+- **ACP Events**: The agent responds to four main events:
+ - `task_received`: When a new task is created
+ - `task_message_received`: When a message is sent within a task
+ - `task_approved`: When a task is approved
+ - `task_canceled`: When a task is canceled
+
+## Running the Agent
+
+1. Run the agent locally:
+```bash
+agentex agents run --manifest manifest.yaml
+```
+
+The agent will start on port 8000 and print messages whenever it receives any of the ACP events.
+
+## What's Inside
+
+This template:
+- Sets up a basic ACP server
+- Handles each of the required ACP events with simple print statements
+- Provides a foundation for building more complex agents
+
+## Next Steps
+
+For more advanced agent development, check out the AgentEx tutorials:
+
+- **Tutorials 00-08**: Learn about building synchronous agents with ACP
+- **Tutorials 09-10**: Learn how to use Temporal to power asynchronous agents
+ - Tutorial 09: Basic Temporal workflow setup
+ - Tutorial 10: Advanced Temporal patterns and best practices
+
+These tutorials will help you understand:
+- How to handle long-running tasks
+- Implementing state machines
+- Managing complex workflows
+- Best practices for async agent development
+
+## The Manifest File
+
+The `manifest.yaml` file is your agent's configuration file. It defines:
+- How your agent should be built and packaged
+- What files are included in your agent's Docker image
+- Your agent's name and description
+- Local development settings (like the port your agent runs on)
+
+This file is essential for both local development and deployment of your agent.
+
+## Project Structure
+
+```
+hello_sync/
+├── project/ # Your agent's code
+│ ├── __init__.py
+│ └── acp.py # ACP server and event handlers
+├── Dockerfile # Container definition
+├── manifest.yaml # Deployment config
+└── requirements.txt # Dependencies
+```
+
+## Development
+
+1. **Customize Event Handlers**
+ - Modify the handlers in `acp.py` to implement your agent's logic
+ - Add your own tools and capabilities
+ - Implement custom state management
+
+2. **Add Dependencies**
+ - Add required packages to `requirements.txt`
+ - Update the manifest with any needed credentials
+
+## Local Development
+
+1. **Install AgentEx**
+```bash
+cd agentex-py
+uv venv
+source .venv/bin/activate
+uv sync
+```
+
+2. **Deploy your agent**
+```bash
+# From this directory
+export ENVIRONMENT=development && agentex agents create --manifest manifest.yaml
+```
+
+3. **Interact with your agent**
+
+Option 0: CLI (deprecated - to be replaced once a new CLI is implemented - please use the web UI for now!)
+```bash
+# Submit a task via CLI
+agentex tasks submit --agent s020-streaming --task "Your task here"
+```
+
+Option 1: Web UI
+```bash
+# Start the local web interface
+cd agentex-web
+make dev
+
+# Then open http://localhost:3000 in your browser to chat with your agent
+```
+
+## Development Tips
+
+1. **Local Testing**
+- Set environment variables in project/.env for any required credentials. OR you can set these in the manifest.yaml
+
+### To build the agent Docker image locally (normally not necessary):
+
+1. First, set up CodeArtifact authentication:
+```bash
+../../setup-build-codeartifact.sh
+```
+
+2. Build the agent image:
+```bash
+agentex agents build --manifest manifest.yaml --secret 'id=codeartifact-pip-conf,src=.codeartifact-pip-conf'
+```
+
+
diff --git a/examples/tutorials/00_sync/020_streaming/manifest.yaml b/examples/tutorials/00_sync/020_streaming/manifest.yaml
new file mode 100644
index 000000000..71460af04
--- /dev/null
+++ b/examples/tutorials/00_sync/020_streaming/manifest.yaml
@@ -0,0 +1,119 @@
+# Agent Manifest Configuration
+# ---------------------------
+# This file defines how your agent should be built and deployed.
+
+# Build Configuration
+# ------------------
+# The build config defines what gets packaged into your agent's Docker image.
+# This same configuration is used whether building locally or remotely.
+#
+# When building:
+# 1. All files from include_paths are collected into a build context
+# 2. The context is filtered by dockerignore rules
+# 3. The Dockerfile uses this context to build your agent's image
+# 4. The image is pushed to a registry and used to run your agent
+build:
+ context:
+ # Root directory for the build context
+ root: ../ # Keep this as the default root
+
+ # Paths to include in the Docker build context
+ # Must include:
+ # - agentex-py: The core framework (needed for agent execution)
+ # - Your agent's directory (your custom agent code)
+ # These paths are collected and sent to the Docker daemon for building
+
+ include_paths:
+ - 020_streaming
+
+ # Path to your agent's Dockerfile
+ # This defines how your agent's image is built from the context
+ # Relative to the root directory
+ dockerfile: 020_streaming/Dockerfile
+
+ # Path to your agent's .dockerignore
+ # Filters unnecessary files from the build context
+ # Helps keep build context small and builds fast
+ dockerignore: 020_streaming/.dockerignore
+
+
+# Local Development Configuration
+# -----------------------------
+# Only used when running the agent locally
+local_development:
+ agent:
+ port: 8000 # Port where your local ACP server is running
+ host_address: host.docker.internal # Host address for Docker networking (host.docker.internal for Docker, localhost for direct)
+
+ # File paths for local development (relative to this manifest.yaml)
+ paths:
+ # Path to ACP server file
+ # Examples:
+ # project/acp.py (standard)
+ # src/server.py (custom structure)
+ # ../shared/acp.py (shared across projects)
+ # /absolute/path/acp.py (absolute path)
+ acp: project/acp.py
+
+
+# Agent Configuration
+# -----------------
+agent:
+ acp_type: sync
+ # Unique name for your agent
+ # Used for task routing and monitoring
+ name: s020-streaming
+
+ # Description of what your agent does
+ # Helps with documentation and discovery
+ description: An AgentEx agent that does multiturn streaming chat
+
+ # Temporal workflow configuration
+ # Set enabled: true to use Temporal workflows for long-running tasks
+ temporal:
+ enabled: false
+
+ # Optional: Credentials mapping
+ # Maps Kubernetes secrets to environment variables
+ # Common credentials include:
+ # credentials:
+ # - env_var_name: OPENAI_API_KEY
+ # secret_name: openai-api-key
+ # secret_key: api-key
+
+ # Optional: Set Environment variables for running your agent locally as well
+ # as for deployment later on
+ # env:
+ # - name: OPENAI_BASE_URL
+ # value: "https://api.openai.com/v1"
+ # - name: ACCOUNT_ID
+ # value: "your_account_id_here"
+
+
+# Deployment Configuration
+# -----------------------
+# Configuration for deploying your agent to Kubernetes clusters
+deployment:
+ # Container image configuration
+ image:
+ repository: "" # Update with your container registry
+ tag: "latest" # Default tag, should be versioned in production
+
+ # Global deployment settings that apply to all clusters
+ # These can be overridden in cluster-specific files (deploy/*.yaml)
+ global:
+ agent:
+ name: "s020-streaming"
+ description: "An AgentEx agent that does multiturn streaming chat"
+
+ # Default replica count
+ replicaCount: 1
+
+ # Default resource requirements
+ resources:
+ requests:
+ cpu: "500m"
+ memory: "1Gi"
+ limits:
+ cpu: "1000m"
+ memory: "2Gi"
\ No newline at end of file
diff --git a/examples/tutorials/00_sync/020_streaming/project/__init__.py b/examples/tutorials/00_sync/020_streaming/project/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/examples/tutorials/00_sync/020_streaming/project/acp.py b/examples/tutorials/00_sync/020_streaming/project/acp.py
new file mode 100644
index 000000000..787f2daed
--- /dev/null
+++ b/examples/tutorials/00_sync/020_streaming/project/acp.py
@@ -0,0 +1,98 @@
+import os
+from typing import AsyncGenerator, Union
+
+from agentex.lib import adk
+from agentex.lib.sdk.fastacp.fastacp import FastACP
+from agentex.lib.types.acp import SendMessageParams
+from agentex.lib.types.llm_messages import AssistantMessage, LLMConfig, SystemMessage, UserMessage
+from agentex.lib.types.task_message_updates import StreamTaskMessageDelta, StreamTaskMessageDone, StreamTaskMessageFull, TaskMessageUpdate, TextDelta
+from agentex.lib.utils.model_utils import BaseModel
+from agentex.types.task_message_content import TaskMessageContent, TextContent
+
+# Create an ACP server
+acp = FastACP.create(
+ acp_type="sync",
+)
+
+
+class StateModel(BaseModel):
+ system_prompt: str
+ model: str
+
+
+# Note: The return of this handler is required to be persisted by the Agentex Server
+@acp.on_message_send
+async def handle_message_send(
+ params: SendMessageParams
+) -> Union[TaskMessageContent, AsyncGenerator[TaskMessageUpdate, None]]:
+ """
+    In this tutorial, we'll see how to handle a basic multi-turn conversation with streaming.
+ """
+ #########################################################
+ # 1-3. These steps are all the same as the hello acp tutorial.
+ #########################################################
+
+ if not params.content:
+ return
+
+ if params.content.type != "text":
+ raise ValueError(f"Expected text message, got {params.content.type}")
+
+ if params.content.author != "user":
+ raise ValueError(f"Expected user message, got {params.content.author}")
+
+ if not os.environ.get("OPENAI_API_KEY"):
+ yield StreamTaskMessageFull(
+ index=0,
+ type="full",
+ content=TextContent(
+ author="agent",
+ content="Hey, sorry I'm unable to respond to your message because you're running this example without an OpenAI API key. Please set the OPENAI_API_KEY environment variable to run this example. Do this by either by adding a .env file to the project/ directory or by setting the environment variable in your terminal.",
+ ),
+ )
+
+ # Try to retrieve the state. If it doesn't exist, create it.
+ task_state = await adk.state.get_by_task_and_agent(task_id=params.task.id, agent_id=params.agent.id)
+
+ if not task_state:
+ # If the state doesn't exist, create it.
+ state = StateModel(system_prompt="You are a helpful assistant that can answer questions.", model="gpt-4o-mini")
+ task_state = await adk.state.create(task_id=params.task.id, agent_id=params.agent.id, state=state)
+ else:
+ state = StateModel.model_validate(task_state.state)
+
+ task_messages = await adk.messages.list(task_id=params.task.id)
+
+ llm_messages = [
+ SystemMessage(content=state.system_prompt),
+ *[
+ UserMessage(content=message.content.content) if message.content.author == "user" else AssistantMessage(content=message.content.content)
+ for message in task_messages
+ if message.content and message.content.type == "text"
+ ]
+ ]
+
+ #########################################################
+ # 4. Call an LLM to respond to the user's message and stream the response to the client.
+ #########################################################
+
+ # Call an LLM to respond to the user's message
+
+ print(f"Calling LLM with model {state.model} and messages {llm_messages}")
+
+ # The Agentex server automatically commits input and output messages to the database so you don't need to do this yourself, simply process the input content and return the output content.
+
+ message_index = 0
+ async for chunk in adk.providers.litellm.chat_completion_stream(
+ llm_config=LLMConfig(model=state.model, messages=llm_messages, stream=True),
+ trace_id=params.task.id,
+ ):
+ if chunk and chunk.choices and chunk.choices[0].delta and chunk.choices[0].delta.content:
+ yield StreamTaskMessageDelta(
+ index=message_index,
+ delta=TextDelta(text_delta=chunk.choices[0].delta.content or ""),
+ )
+
+ yield StreamTaskMessageDone(
+ index=message_index,
+ )
diff --git a/examples/tutorials/00_sync/020_streaming/requirements.txt b/examples/tutorials/00_sync/020_streaming/requirements.txt
new file mode 100644
index 000000000..e9b9640aa
--- /dev/null
+++ b/examples/tutorials/00_sync/020_streaming/requirements.txt
@@ -0,0 +1,4 @@
+agentex-py>=0.0.4,<0.1.0
+
+# Scale GenAI Platform Python SDK
+scale-gp
diff --git a/examples/tutorials/00_sync/020_streaming/test.ipynb b/examples/tutorials/00_sync/020_streaming/test.ipynb
new file mode 100644
index 000000000..386043ed6
--- /dev/null
+++ b/examples/tutorials/00_sync/020_streaming/test.ipynb
@@ -0,0 +1,193 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": 15,
+ "id": "d1c309d6",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "AGENT_NAME = \"s020-streaming\""
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 16,
+ "id": "36834357",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from agentex import Agentex\n",
+ "\n",
+ "client = Agentex(api_key=\"random\", base_url=\"http://localhost:5003\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 17,
+ "id": "9f6e6ef0",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# # (Optional) Create a new task. If you don't create a new task, each message will be sent to a new task. The server will create the task for you.\n",
+ "\n",
+ "# import uuid\n",
+ "\n",
+ "# TASK_ID = str(uuid.uuid4())[:8]\n",
+ "\n",
+ "# rpc_response = client.agents.rpc_by_name(\n",
+ "# agent_name=AGENT_NAME,\n",
+ "# method=\"task/create\",\n",
+ "# params={\n",
+ "# \"name\": f\"{TASK_ID}-task\",\n",
+ "# \"params\": {}\n",
+ "# }\n",
+ "# )\n",
+ "\n",
+ "# task = rpc_response.result\n",
+ "# print(task)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 18,
+ "id": "b03b0d37",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Hello! I can assist you with a variety of tasks, including:\n",
+ "\n",
+ "1. **Answering Questions**: I can provide information on a wide range of topics, including science, history, technology, and more.\n",
+ "2. **Explaining Concepts**: If you need help understanding a concept or topic, I can provide explanations and clarifications.\n",
+ "3. **Writing Assistance**: I can help with writing tasks, such as drafting emails, essays, or creative pieces.\n",
+ "4. **Language Support**: I can assist with grammar, vocabulary, and writing in different languages.\n",
+ "5. **Providing Recommendations**: Whether it's books, movies, or recipes, I can suggest options based on your preferences.\n",
+ "6. **Problem-Solving**: I can help you think through problems or brainstorm ideas.\n",
+ "\n",
+ "If there's something specific you need help with, feel free to ask!\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Test non streaming response\n",
+ "from agentex.types import TaskMessage, TextContent\n",
+ "\n",
+ "rpc_response = client.agents.rpc_by_name(\n",
+ " agent_name=AGENT_NAME,\n",
+ " method=\"message/send\",\n",
+ " params={\n",
+ " \"content\": {\"type\": \"text\", \"author\": \"user\", \"content\": \"Hello what can you do?\"},\n",
+ " \"stream\": False\n",
+ " }\n",
+ ")\n",
+ "\n",
+ "# # Extract and print just the text content from the response\n",
+ "# # The response is expected to be a dict with a \"result\" key containing a list of message dicts\n",
+ "if rpc_response and rpc_response.result:\n",
+ " for message in rpc_response.result:\n",
+ " if isinstance(message, TaskMessage):\n",
+ " content = message.content\n",
+ " if isinstance(content, TextContent):\n",
+ " text = content.content\n",
+ " print(text)\n",
+ " else:\n",
+ " print(\"No text content found in response.\")\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 19,
+ "id": "79688331",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Hello! I can help with a variety of tasks, including:\n",
+ "\n",
+ "1. **Answering Questions**: I can provide information on a wide range of topics, from historical facts to scientific concepts.\n",
+ "\n",
+ "2. **Providing Explanations**: If you're looking to understand a specific topic better, I can explain it in detail.\n",
+ "\n",
+ "3. **Offering Writing Assistance**: I can help with writing prompts, editing text, or generating ideas for essays, articles, and more.\n",
+ "\n",
+ "4. **Solving Problems**: I can assist with math problems, logic puzzles, and more.\n",
+ "\n",
+ "5. **Learning Support**: I can help with study tips, summarizing information, and creating study guides.\n",
+ "\n",
+ "6. **Conversational Practice**: If you want to practice a language or just have a chat, I’m here for that too!\n",
+ "\n",
+ "7. **Recommendation Systems**: I can recommend books, movies, or other media based on your interests.\n",
+ "\n",
+ "Feel free to ask me anything specific you need help with!"
+ ]
+ }
+ ],
+ "source": [
+ "# Test streaming response\n",
+ "import json\n",
+ "from agentex.types import AgentRpcResponse\n",
+ "from agentex.types.agent_rpc_result import StreamTaskMessageDelta, StreamTaskMessageFull\n",
+ "from agentex.types.text_delta import TextDelta\n",
+ "\n",
+ "with client.agents.with_streaming_response.rpc_by_name(\n",
+ " agent_name=AGENT_NAME,\n",
+ " method=\"message/send\",\n",
+ " params={\n",
+ " \"content\": {\"type\": \"text\", \"author\": \"user\", \"content\": \"Hello what can you do?\"},\n",
+ " \"stream\": True\n",
+ " }\n",
+ ") as response:\n",
+ " for streaming_response_str in response.iter_text():\n",
+ " chunk_rpc_responses = streaming_response_str.split(\"\\n\")\n",
+ " for chunk_rpc_response in chunk_rpc_responses:\n",
+ " if chunk_rpc_response:\n",
+ " chunk_rpc_response_dict = json.loads(chunk_rpc_response)\n",
+ " chunk_rpc_response = AgentRpcResponse.model_validate(chunk_rpc_response_dict)\n",
+ " result = chunk_rpc_response.result\n",
+ " # Print only the text deltas as they arrive or any full messages\n",
+ " if isinstance(result, StreamTaskMessageDelta):\n",
+ " delta = result.delta\n",
+ " if isinstance(delta, TextDelta):\n",
+ " print(delta.text_delta, end=\"\", flush=True)\n",
+ " elif isinstance(result, StreamTaskMessageFull):\n",
+ " content = result.content\n",
+ " if isinstance(content, TextContent):\n",
+ " print(content.content)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "4ffb663c",
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.12.9"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/examples/tutorials/00_sync/030_hello_oldowan/.dockerignore b/examples/tutorials/00_sync/030_hello_oldowan/.dockerignore
new file mode 100644
index 000000000..c3620f1bc
--- /dev/null
+++ b/examples/tutorials/00_sync/030_hello_oldowan/.dockerignore
@@ -0,0 +1,46 @@
+# Python
+__pycache__/
+*.py[cod]
+*$py.class
+*.so
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# Environments
+.env**
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# IDE
+.idea/
+.vscode/
+*.swp
+*.swo
+
+# Git
+.git
+.gitignore
+
+# Misc
+.DS_Store
+
+# CodeArtifact configuration
+.codeartifact-pip-conf
\ No newline at end of file
diff --git a/examples/tutorials/00_sync/030_hello_oldowan/Dockerfile b/examples/tutorials/00_sync/030_hello_oldowan/Dockerfile
new file mode 100644
index 000000000..0ad11f331
--- /dev/null
+++ b/examples/tutorials/00_sync/030_hello_oldowan/Dockerfile
@@ -0,0 +1,45 @@
+# syntax=docker/dockerfile:1.3
+FROM python:3.12-slim
+COPY --from=ghcr.io/astral-sh/uv:0.6.4 /uv /uvx /bin/
+
+# Install system dependencies
+RUN apt-get update && apt-get install -y \
+ htop \
+ vim \
+ curl \
+ tar \
+ python3-dev \
+ postgresql-client \
+ build-essential \
+ libpq-dev \
+ gcc \
+ cmake \
+ netcat-openbsd \
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
+
+RUN uv pip install --system --upgrade pip setuptools wheel
+
+ENV UV_HTTP_TIMEOUT=1000
+
+# Copy just the requirements file to optimize caching
+COPY 030_hello_oldowan/requirements.txt /app/requirements.txt
+
+WORKDIR /app/
+
+# Install the required Python packages
+RUN --mount=type=secret,id=codeartifact-pip-conf,target=/etc/pip.conf \
+ export UV_INDEX_URL=$(grep -E "^index-url" /etc/pip.conf | cut -d'=' -f2- | xargs) && \
+ export UV_EXTRA_INDEX_URL=$(grep -E "^extra-index-url" /etc/pip.conf | cut -d'=' -f2- | xargs || echo "") && \
+ uv pip install --system -r requirements.txt
+
+# Copy the project code
+COPY 030_hello_oldowan/project /app/project
+
+WORKDIR /app/project
+
+# Set environment variables
+ENV PYTHONPATH=/app
+
+# Run the agent using uvicorn
+CMD ["uvicorn", "acp:acp", "--host", "0.0.0.0", "--port", "8000"]
\ No newline at end of file
diff --git a/examples/tutorials/00_sync/030_hello_oldowan/README.md b/examples/tutorials/00_sync/030_hello_oldowan/README.md
new file mode 100644
index 000000000..756539ef6
--- /dev/null
+++ b/examples/tutorials/00_sync/030_hello_oldowan/README.md
@@ -0,0 +1,17 @@
+# Hello Oldowan Agent
+
+This is a simple example agent that demonstrates the basics of the Agent 2 Client Protocol (ACP) and the AgentEx framework with an integration to oldowan.
+
+## For Development
+Navigate to `tutorials/00_sync/030_hello_oldowan`
+
+```bash
+# Generate CodeArtifact configuration for building (run from repo root)
+./setup-build-codeartifact.sh
+
+# Set up local development environment
+uv venv --python 3.12
+source .venv/bin/activate
+
+uv pip install -r requirements.txt --prerelease=allow
+```
diff --git a/examples/tutorials/00_sync/030_hello_oldowan/manifest.yaml b/examples/tutorials/00_sync/030_hello_oldowan/manifest.yaml
new file mode 100644
index 000000000..bcbd04e51
--- /dev/null
+++ b/examples/tutorials/00_sync/030_hello_oldowan/manifest.yaml
@@ -0,0 +1,115 @@
+# Agent Manifest Configuration
+# ---------------------------
+# This file defines how your agent should be built and deployed.
+
+# Build Configuration
+# ------------------
+# The build config defines what gets packaged into your agent's Docker image.
+# This same configuration is used whether building locally or remotely.
+#
+# When building:
+# 1. All files from include_paths are collected into a build context
+# 2. The context is filtered by dockerignore rules
+# 3. The Dockerfile uses this context to build your agent's image
+# 4. The image is pushed to a registry and used to run your agent
+build:
+ context:
+ # Root directory for the build context
+ root: ../ # Keep this as the default root
+
+ # Paths to include in the Docker build context
+ # Must include:
+ # - agentex-py: The core framework (needed for agent execution)
+ # - Your agent's directory (your custom agent code)
+ # These paths are collected and sent to the Docker daemon for building
+ include_paths:
+ - 030_hello_oldowan
+
+ # Path to your agent's Dockerfile
+ # This defines how your agent's image is built from the context
+ # Relative to the root directory
+ dockerfile: 030_hello_oldowan/Dockerfile
+
+ # Path to your agent's .dockerignore
+ # Filters unnecessary files from the build context
+ # Helps keep build context small and builds fast
+ dockerignore: 030_hello_oldowan/.dockerignore
+
+# Local Development Configuration
+# -----------------------------
+# Only used when running the agent locally
+local_development:
+ agent:
+ port: 8000 # Port where your local ACP server is running
+ host_address: host.docker.internal # Host address for Docker networking (host.docker.internal for Docker, localhost for direct)
+
+ # File paths for local development (relative to this manifest.yaml)
+ paths:
+ # Path to ACP server file
+ # Examples:
+ # project/acp.py (standard)
+ # src/server.py (custom structure)
+ # ../shared/acp.py (shared across projects)
+ # /absolute/path/acp.py (absolute path)
+ acp: project/acp.py
+
+# Agent Configuration
+# -----------------
+agent:
+ acp_type: sync
+ # Unique name for your agent
+ # Used for task routing and monitoring
+ name: s030-hello-oldowan
+
+ # Description of what your agent does
+ # Helps with documentation and discovery
+ description: An AgentEx agent that does multiturn streaming chat with tools in oldowan
+
+ # Temporal workflow configuration
+ # Set enabled: true to use Temporal workflows for long-running tasks
+ temporal:
+ enabled: false
+
+ # Optional: Credentials mapping
+ # Maps Kubernetes secrets to environment variables
+ # Common credentials include:
+ # credentials:
+ # - env_var_name: OPENAI_API_KEY
+ # secret_name: openai-api-key
+ # secret_key: api-key
+
+ # Optional: Set Environment variables for running your agent locally as well
+ # as for deployment later on
+ # env:
+ # - name: OPENAI_BASE_URL
+ # value: "https://api.openai.com/v1"
+ # - name: ACCOUNT_ID
+ # value: "your_account_id_here"
+
+# Deployment Configuration
+# -----------------------
+# Configuration for deploying your agent to Kubernetes clusters
+deployment:
+ # Container image configuration
+ image:
+ repository: "" # Update with your container registry
+ tag: "latest" # Default tag, should be versioned in production
+
+ # Global deployment settings that apply to all clusters
+ # These can be overridden in cluster-specific files (deploy/*.yaml)
+ global:
+ agent:
+ name: "s030-hello-oldowan"
+ description: "An AgentEx agent that does multiturn streaming chat with tools in oldowan"
+
+ # Default replica count
+ replicaCount: 1
+
+ # Default resource requirements
+ resources:
+ requests:
+ cpu: "500m"
+ memory: "1Gi"
+ limits:
+ cpu: "1000m"
+ memory: "2Gi"
diff --git a/examples/tutorials/00_sync/030_hello_oldowan/project/__init__.py b/examples/tutorials/00_sync/030_hello_oldowan/project/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/examples/tutorials/00_sync/030_hello_oldowan/project/acp.py b/examples/tutorials/00_sync/030_hello_oldowan/project/acp.py
new file mode 100644
index 000000000..f6f6795a9
--- /dev/null
+++ b/examples/tutorials/00_sync/030_hello_oldowan/project/acp.py
@@ -0,0 +1,512 @@
+import os
+import json
+from typing import AsyncGenerator, Callable, List, Union, Dict
+from functools import partial
+
+from agentex.lib import adk
+from agentex.lib.sdk.fastacp.fastacp import FastACP
+from agentex.lib.types.task_message_updates import (
+ StreamTaskMessageDelta,
+ StreamTaskMessageFull,
+ TaskMessageUpdate,
+ TextDelta,
+ ToolRequestDelta,
+ ToolResponseDelta,
+)
+from agentex.lib.types.acp import SendMessageParams
+from agentex.lib.types.llm_messages import Message, UserMessage
+from agentex.lib.utils.model_utils import BaseModel
+from agentex.types.span import Span
+from agentex.types.task_message_content import TaskMessageContent, TextContent, ToolRequestContent, ToolResponseContent
+
+
+from oldowan.tools.internal import google_search
+from oldowan.completions import (
+ ToolMessage,
+ ChatCompletionMessage,
+ simple_agent_acompletion,
+ ChoiceDelta,
+)
+
+assert os.environ.get("SGP_API_KEY") is not None, "SGP_API_KEY is not set"
+assert os.environ.get("SGP_ACCOUNT_ID") is not None, "SGP_ACCOUNT_ID is not set"
+
+
+def think(thinking_str: str):
+ """
+ Use the tool to think about something. It will not obtain new information or change the database, but just append the thought to the log. Use it when complex reasoning or some cache memory is needed.
+ Args:
+ thinking_str: A thought to think about.
+ """
+ return
+
+
+TOOL_DICT = {
+ "google_search": google_search,
+ "think": think,
+}
+
+# Create an ACP server
+acp = FastACP.create(
+ acp_type="sync",
+)
+
+TOOL_RESPONSE_ID_SUFFIX = "_response"
+
+
+class SimpleAgentCompletionConfig(BaseModel):
+ model: str
+ tools: List[str]
+ max_tokens: int
+ stream: bool
+
+
+class StateModel(BaseModel):
+ turn_number: int # The number of turns the agent has taken
+ messages: List[Message] # The messages the agent has seen
+ simple_agent_completion_config: (
+ SimpleAgentCompletionConfig # The function to call to get an agent response
+ )
+
+
+def convert_choice_delta_to_stream_task_message_deltas(
+ choice_delta: ChoiceDelta, idx: int
+) -> List[StreamTaskMessageDelta]:
+ """
+ This function converts a ChoiceDelta to a list of StreamTaskMessageDelta objects.
+ Args:
+ choice_delta: The ChoiceDelta to convert.
+        idx: The task message index to attach each delta to.
+ Returns:
+ A list of StreamTaskMessageDelta objects.
+ """
+ # these are tool requests
+ deltas = []
+ if (
+ choice_delta.tool_calls is not None
+ and choice_delta.tool_calls[0].function.name is not None
+ ):
+ for tool_call in choice_delta.tool_calls:
+ deltas.append(
+ StreamTaskMessageDelta(
+ index=idx,
+ delta=ToolRequestDelta(
+ type="tool_request",
+ tool_call_id=tool_call.id,
+ name=tool_call.function.name,
+ arguments_delta=tool_call.function.arguments,
+ ),
+ )
+ )
+ # pass
+ # These are tool responses
+ elif choice_delta.role == "tool":
+ deltas.append(
+ StreamTaskMessageDelta(
+ index=idx,
+ delta=ToolResponseDelta(
+ type="tool_response",
+ tool_call_id=choice_delta.tool_call_id,
+ name=choice_delta.name,
+ content_delta=choice_delta.content,
+ ),
+ )
+ )
+
+ # These are assistant messages
+ elif choice_delta.content is not None:
+ deltas.append(
+ StreamTaskMessageDelta(
+ index=idx,
+ delta=TextDelta(
+ type="text",
+ text_delta=choice_delta.content,
+ ),
+ )
+ )
+
+ return deltas
+
+
+def convert_choice_delta_to_message_content(
+ choice_delta: ChoiceDelta,
+) -> TaskMessageContent:
+ """
+ This function converts a ChoiceDelta to a TaskMessageContent object.
+ Args:
+ choice_delta: The ChoiceDelta to convert.
+ Returns:
+ A TaskMessageContent object.
+ """
+ # This converts a ChoiceDelta to a TaskMessage which will instantiate "the box" to send to client
+ if choice_delta.tool_calls is not None:
+        # since we are streaming we can assume we only need to create a message for the first tool call
+ return ToolRequestContent(
+ author="agent",
+ name=choice_delta.tool_calls[0].function.name,
+ tool_call_id=choice_delta.tool_calls[0].id,
+ arguments={}, # have to start this empty since we are streaming
+ )
+ elif choice_delta.role == "tool":
+ print("HERE I AM: ", choice_delta)
+ return ToolResponseContent(
+ author="agent",
+ tool_call_id=choice_delta.tool_calls[0].id,
+ name=choice_delta.name,
+ content="", # starting empty because we add to it
+ )
+ elif choice_delta.role == "assistant":
+ return TextContent(
+ author="agent",
+ content="", # starting empty because we add to it
+ )
+ raise ValueError(
+ f"Unknown role: {choice_delta.role}. Failed to convert to TaskMessage"
+ )
+
+
+def convert_oldowan_message_to_stream_task_message_full(
+ id_to_task_message_idx: Dict[str, int],
+ oldowan_message: Union[ChatCompletionMessage, ToolMessage],
+) -> List[StreamTaskMessageFull]:
+ """
+ This function converts an Oldowan message to a list of StreamTaskMessageFull objects.
+ Args:
+        id_to_task_message_idx: A dictionary mapping message, tool call, and
+            tool response ids to task message indices.
+ oldowan_message: The Oldowan message to convert.
+ Returns:
+ A list of StreamTaskMessageFull objects.
+ """
+
+ fulls = []
+ if isinstance(oldowan_message, ChatCompletionMessage):
+ # First create all tool calls
+ if oldowan_message.tool_calls is not None:
+ for tool_call in oldowan_message.tool_calls:
+ fulls.append(
+ StreamTaskMessageFull(
+ index=id_to_task_message_idx[tool_call.id],
+ content=ToolRequestContent(
+ author="agent",
+ name=tool_call.function.name,
+ arguments=json.loads(tool_call.function.arguments),
+ tool_call_id=tool_call.id,
+ ),
+ )
+ )
+
+ # Create the assistant messages
+ if oldowan_message.content is not None:
+ fulls.append(
+ StreamTaskMessageFull(
+ index=id_to_task_message_idx[oldowan_message.id],
+ content=TextContent(
+ author="agent",
+ content=oldowan_message.content,
+ ),
+ )
+ )
+
+ # Finally create the tool responses
+ elif isinstance(oldowan_message, ToolMessage):
+ fulls.append(
+ StreamTaskMessageFull(
+ index=id_to_task_message_idx[
+ oldowan_message.tool_call_id + TOOL_RESPONSE_ID_SUFFIX
+ ],
+ content=ToolResponseContent(
+ author="agent",
+ tool_call_id=oldowan_message.tool_call_id,
+ name=oldowan_message.name,
+ content=oldowan_message.content,
+ ),
+ )
+ )
+
+ return fulls
+
+
+def get_oldowan_message_ids(
+ oldowan_message: Union[ChatCompletionMessage, ToolMessage],
+) -> List[str]:
+ """
+ This function gets the ids of the oldowan message.
+ Args:
+ oldowan_message: The Oldowan message to get the ids of.
+ Returns:
+ A list of ids.
+ """
+ message_ids = []
+ if isinstance(oldowan_message, ChatCompletionMessage):
+ # check that there is content
+ if oldowan_message.content is not None:
+ message_ids.append(oldowan_message.id)
+
+ # check if there are tool calls
+ if oldowan_message.tool_calls is not None:
+ for tool_call in oldowan_message.tool_calls:
+ message_ids.append(tool_call.id)
+
+ elif isinstance(oldowan_message, ToolMessage):
+ message_ids.append(oldowan_message.tool_call_id + TOOL_RESPONSE_ID_SUFFIX)
+
+ return message_ids
+
+
+# This will eventually become adk.providers.oldowan.stream_agent_sync
+async def stream_oldowan_agent_sync(
+ messages: List[Message],
+ task_id: str,
+ span: Span,
+ simple_agent_acompletion_fn: Callable,
+) -> AsyncGenerator[StreamTaskMessageDelta, None]:
+ """
+ Stream an Oldowan agent response to the client.
+ Args:
+ messages: The messages to send to the agent.
+ task_id: The task id.
+ span: The span to use for tracing.
+ Returns:
+ AsyncGenerator[TaskMessageUpdate, None]: A generator of task message updates.
+ """
+ response_stream = await simple_agent_acompletion_fn(messages=messages)
+
+ # This is used to create the current TaskMessage object
+ cur_task_message_id = None
+ cur_idx = 0
+
+ # This maps id either from message object, tool_call, or tool_response to the TaskMessage object
+ id_to_task_message_idx = {}
+
+ # These are messages that have already been sent in "full"
+ persisted_messages = []
+ events = []
+
+ # These are ChoiceDelta objects
+ async for event in response_stream:
+ if event.role is not None:
+ # if there is a tool call made then check if its a new tool_call_id
+ if (
+ event.tool_calls is not None
+ and event.tool_calls[0].id is not None
+ and event.tool_calls[0].id not in id_to_task_message_idx
+ ):
+ print(f"Role changed: {event.role}")
+ print(f"Tool call id changed: {event.tool_calls[0].id}")
+ cur_task_message_id = event.tool_calls[0].id
+ id_to_task_message_idx[event.tool_calls[0].id] = cur_idx
+ cur_idx += 1
+
+ # id_to_task_message[event.tool_calls[0].id] = await adk.messages.create(
+ # task_id=task_id,
+ # content=convert_choice_delta_to_message_content(event),
+ # )
+ # print(f"Created new task message: {id_to_task_message[event.tool_calls[0].id]}")
+
+ # If you are in a tool response, you should check that either the tool_call_id has changed or your last type was not tool
+ elif event.role == "tool" and (
+ event.tool_call_id + TOOL_RESPONSE_ID_SUFFIX
+ not in id_to_task_message_idx
+ ):
+ print(f"Role changed: {event.role}")
+ print(
+ f"Tool Response id: {event.tool_call_id + TOOL_RESPONSE_ID_SUFFIX}"
+ )
+ cur_task_message_id = event.tool_call_id + TOOL_RESPONSE_ID_SUFFIX
+ id_to_task_message_idx[event.tool_call_id + TOOL_RESPONSE_ID_SUFFIX] = (
+ cur_idx
+ )
+ cur_idx += 1
+ # id_to_task_message[event.tool_call_id + TOOL_RESPONSE_ID_SUFFIX] = await adk.messages.create(
+ # task_id=task_id,
+ # content=convert_choice_delta_to_message_content(event),
+ # )
+ # print(f"Created new task message: {id_to_task_message[event.tool_call_id + TOOL_RESPONSE_ID_SUFFIX]}")
+
+ elif (
+ event.role == "assistant"
+ and event.content is not None
+ and event.id not in id_to_task_message_idx
+ ): # this is an assistant message
+ print(f"Role is: {event.role}")
+ assert hasattr(
+ event, "id"
+ ), "Event does not have an id, please upgrade to latest oldowan"
+ print(f"Event id: {event.id}")
+ cur_task_message_id = event.id
+ id_to_task_message_idx[event.id] = cur_idx
+ cur_idx += 1
+ # update the current role and task message
+ # id_to_task_message[event.id] = await adk.messages.create(
+ # task_id=task_id,
+ # content=convert_choice_delta_to_message_content(event),
+ # trace_id=task_id,
+ # )
+ # print(f"Created new task message: {id_to_task_message[event.id]}")
+
+ # Now we can create the items to stream
+ # NOTE: key assumption is that ChoiceDeltaToolCall can only apply to one tool call at a time.
+ for task_message_delta in convert_choice_delta_to_stream_task_message_deltas(
+ event, idx=id_to_task_message_idx[cur_task_message_id]
+ ):
+ yield task_message_delta
+
+ events.append(event)
+
+ # Issue is that we can either have an oldowan message before a task message has been created OR task message before the oldowan message
+ # this is because tool response messages are added to messages immediately, but streamed one after the other.
+ # For each oldowan message, if we haven't persisted it yet, then do so
+ for idx, oldowan_message in enumerate(response_stream.messages):
+ if oldowan_message not in persisted_messages and all(
+ [
+ id in id_to_task_message_idx
+ for id in get_oldowan_message_ids(oldowan_message)
+ ]
+ ):
+ async with adk.tracing.span(
+ trace_id=task_id,
+ parent_id=span.id,
+ name=f"Message {idx}",
+ input=messages
+ + response_stream.messages[:idx], # input messages to this message
+ ) as message_span:
+ message_span.output = oldowan_message
+
+ # Send the full messages now that they are done
+ for (
+ stream_task_message_full
+ ) in convert_oldowan_message_to_stream_task_message_full(
+ id_to_task_message_idx=id_to_task_message_idx,
+ oldowan_message=oldowan_message,
+ ):
+ yield stream_task_message_full
+
+ print(f"Persisted message: {oldowan_message}")
+ persisted_messages.append(oldowan_message)
+
+ # Stream the last object
+ async with adk.tracing.span(
+ trace_id=task_id,
+ parent_id=span.id,
+ name=f"Message {len(response_stream.messages)}",
+ input=messages + response_stream.messages[:-1],
+ ) as message_span:
+ message_span.output = response_stream.messages[-1]
+
+ # Persist the last message to the DB
+ for stream_task_message_full in convert_oldowan_message_to_stream_task_message_full(
+ id_to_task_message_idx=id_to_task_message_idx,
+ oldowan_message=response_stream.messages[-1],
+ ):
+ yield stream_task_message_full
+ print(f"Persisted message: {response_stream.messages[-1]}")
+ persisted_messages.append(response_stream.messages[-1])
+
+ # Aggregate the messages and store the output
+ messages = response_stream.messages
+ span.output = messages
+
+
+# Note: The return of this handler is required to be persisted by the Agentex Server
+@acp.on_message_send
+async def handle_message_send(
+ params: SendMessageParams
+) -> Union[TaskMessageContent, AsyncGenerator[TaskMessageUpdate, None]]:
+ """
+    In this tutorial, we'll see how to handle a multi-turn conversation, streaming tool calls and responses via Oldowan.
+ """
+ #########################################################
+ # 1-3. These steps are all the same as the hello acp tutorial.
+ #########################################################
+
+ if params.content.type != "text":
+ raise ValueError(f"Expected text message, got {params.content.type}")
+
+ if params.content.author != "user":
+ raise ValueError(f"Expected user message, got {params.content.author}")
+
+ if not os.environ.get("OPENAI_API_KEY"):
+ yield StreamTaskMessageFull(
+ index=0,
+ content=TextContent(
+ author="agent",
+ content="Hey, sorry I'm unable to respond to your message because you're running this example without an OpenAI API key. Please set the OPENAI_API_KEY environment variable to run this example. Do this by either by adding a .env file to the project/ directory or by setting the environment variable in your terminal.",
+ ),
+ )
+
+ # Try to retrieve the state. If it doesn't exist, create it.
+ task_state = await adk.state.get_by_task_and_agent(
+ task_id=params.task.id, agent_id=params.agent.id
+ )
+
+ if not task_state:
+ # If the state doesn't exist, create it.
+ state = StateModel(
+ simple_agent_completion_config=SimpleAgentCompletionConfig(
+ model="openai/gpt-4o",
+ tools=["google_search", "think"],
+ max_tokens=8192,
+ stream=True,
+ ),
+ messages=[],
+ turn_number=0,
+ )
+ assert all(
+ [tool in TOOL_DICT for tool in state.simple_agent_completion_config.tools]
+ ), f"Invalid tool: {state.simple_agent_completion_config.tools}"
+ task_state = await adk.state.create(
+ task_id=params.task.id, agent_id=params.agent.id, state=state
+ )
+ else:
+ state = StateModel.model_validate(task_state.state)
+
+ messages = state.messages
+
+ #########################################################
+ # 4. Call an LLM to respond to the user's message and stream the response to the client.
+ #########################################################
+ print(
+ f"Calling LLM with model {state.simple_agent_completion_config.model_dump_json()} and messages {messages}"
+ )
+
+ # Add the user's message to the conversation history
+ state.messages.append(UserMessage(content=params.content.content))
+
+ # The Agentex server automatically commits input and output messages to the database so you don't need to do this yourself, simply process the input content and return the output content.
+ async with adk.tracing.span(
+ trace_id=params.task.id,
+ name=f"Turn {state.turn_number}",
+ input=state,
+ ) as span:
+ simple_agent_completion_fn = partial(
+ simple_agent_acompletion,
+ model=state.simple_agent_completion_config.model,
+ tools=[
+ TOOL_DICT[tool] for tool in state.simple_agent_completion_config.tools
+ ],
+ max_tokens=state.simple_agent_completion_config.max_tokens,
+ stream=state.simple_agent_completion_config.stream,
+ )
+ # Stream the response and collect the generated messages
+ async for chunk in stream_oldowan_agent_sync(
+ messages=messages,
+ task_id=params.task.id,
+ span=span,
+ simple_agent_acompletion_fn=simple_agent_completion_fn,
+ ):
+ yield chunk
+
+ # The generated messages are accessible from the span output
+ state.messages.extend(span.output)
+
+ state.turn_number += 1
+
+ # Update the state with the new messages
+ await adk.state.update(
+ task_id=params.task.id,
+ agent_id=params.agent.id,
+ state_id=task_state.id,
+ state=state,
+ trace_id=params.task.id,
+ )
diff --git a/examples/tutorials/00_sync/030_hello_oldowan/requirements.txt b/examples/tutorials/00_sync/030_hello_oldowan/requirements.txt
new file mode 100644
index 000000000..1077299aa
--- /dev/null
+++ b/examples/tutorials/00_sync/030_hello_oldowan/requirements.txt
@@ -0,0 +1,6 @@
+# AgentEx Python SDK (installed from the configured package index)
+agentex-py>=0.0.4,<0.1.0
+
+# Scale GenAI Platform Python SDK
+scale-gp
+scale-oldowan>=0.3.17
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/00_base/000_hello_acp/.dockerignore b/examples/tutorials/10_agentic/00_base/000_hello_acp/.dockerignore
new file mode 100644
index 000000000..4521d8465
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/000_hello_acp/.dockerignore
@@ -0,0 +1,46 @@
+# Python
+__pycache__/
+*.py[cod]
+*$py.class
+*.so
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# Environments
+.env**
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# IDE
+.idea/
+.vscode/
+*.swp
+*.swo
+
+# Git
+.git
+.gitignore
+
+# Misc
+.DS_Store
+
+# CodeArtifact configuration
+.codeartifact-pip-conf
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/00_base/000_hello_acp/Dockerfile b/examples/tutorials/10_agentic/00_base/000_hello_acp/Dockerfile
new file mode 100644
index 000000000..34f07ab19
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/000_hello_acp/Dockerfile
@@ -0,0 +1,45 @@
+# syntax=docker/dockerfile:1.3
+FROM python:3.12-slim
+COPY --from=ghcr.io/astral-sh/uv:0.6.4 /uv /uvx /bin/
+
+# Install system dependencies
+RUN apt-get update && apt-get install -y \
+ htop \
+ vim \
+ curl \
+ tar \
+ python3-dev \
+ postgresql-client \
+ build-essential \
+ libpq-dev \
+ gcc \
+ cmake \
+ netcat-openbsd \
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
+
+RUN uv pip install --system --upgrade pip setuptools wheel
+
+ENV UV_HTTP_TIMEOUT=1000
+
+# Copy just the requirements file to optimize caching
+COPY 000_hello_acp/requirements.txt /app/requirements.txt
+
+WORKDIR /app/
+
+# Install the required Python packages
+RUN --mount=type=secret,id=codeartifact-pip-conf,target=/etc/pip.conf \
+ export UV_INDEX_URL=$(grep -E "^index-url" /etc/pip.conf | cut -d'=' -f2- | xargs) && \
+ export UV_EXTRA_INDEX_URL=$(grep -E "^extra-index-url" /etc/pip.conf | cut -d'=' -f2- | xargs || echo "") && \
+ uv pip install --system -r requirements.txt
+
+# Copy the project code
+COPY 000_hello_acp/project /app/project
+
+WORKDIR /app/project
+
+# Set environment variables
+ENV PYTHONPATH=/app
+
+# Run the agent using uvicorn
+CMD ["uvicorn", "acp:acp", "--host", "0.0.0.0", "--port", "8000"]
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/00_base/000_hello_acp/manifest.yaml b/examples/tutorials/10_agentic/00_base/000_hello_acp/manifest.yaml
new file mode 100644
index 000000000..ca5f5df36
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/000_hello_acp/manifest.yaml
@@ -0,0 +1,122 @@
+# Agent Manifest Configuration
+# ---------------------------
+# This file defines how your agent should be built and deployed.
+
+# Build Configuration
+# ------------------
+# The build config defines what gets packaged into your agent's Docker image.
+# This same configuration is used whether building locally or remotely.
+#
+# When building:
+# 1. All files from include_paths are collected into a build context
+# 2. The context is filtered by dockerignore rules
+# 3. The Dockerfile uses this context to build your agent's image
+# 4. The image is pushed to a registry and used to run your agent
+build:
+ context:
+ # Root directory for the build context
+ root: ../ # Keep this as the default root
+
+ # Paths to include in the Docker build context
+ # Must include:
+ # - agentex-py: The core framework (needed for agent execution)
+ # - Your agent's directory (your custom agent code)
+ # These paths are collected and sent to the Docker daemon for building
+ include_paths:
+ - 000_hello_acp
+
+ # Path to your agent's Dockerfile
+ # This defines how your agent's image is built from the context
+ # Relative to the root directory
+ dockerfile: 000_hello_acp/Dockerfile
+
+ # Path to your agent's .dockerignore
+ # Filters unnecessary files from the build context
+ # Helps keep build context small and builds fast
+ dockerignore: 000_hello_acp/.dockerignore
+
+
+# Local Development Configuration
+# -----------------------------
+# Only used when running the agent locally
+local_development:
+ agent:
+ port: 8000 # Port where your local ACP server is running
+ host_address: host.docker.internal # Host address for Docker networking (host.docker.internal for Docker, localhost for direct)
+
+ # File paths for local development (relative to this manifest.yaml)
+ paths:
+ # Path to ACP server file
+ # Examples:
+ # project/acp.py (standard)
+ # src/server.py (custom structure)
+ # ../shared/acp.py (shared across projects)
+ # /absolute/path/acp.py (absolute path)
+ acp: project/acp.py
+
+
+# Agent Configuration
+# -----------------
+agent:
+ # Unique name for your agent
+ # Used for task routing and monitoring
+ name: ab000-hello-acp
+
+ # Type of ACP to use
+ # sync: Simple synchronous ACP implementation
+ # agentic: Advanced ACP with sub-types "base" or "temporal" (requires config)
+ acp_type: agentic
+
+ # Description of what your agent does
+ # Helps with documentation and discovery
+ description: An AgentEx agent that is not intelligent. It just shows how to implement the base agentic ACP type.
+
+ # Temporal workflow configuration
+ # Set enabled: true to use Temporal workflows for long-running tasks
+ temporal:
+ enabled: false
+
+ # Optional: Credentials mapping
+ # Maps Kubernetes secrets to environment variables
+ # Common credentials include:
+ # credentials:
+ # - env_var_name: OPENAI_API_KEY
+ # secret_name: openai-api-key
+ # secret_key: api-key
+
+ # Optional: Set Environment variables for running your agent locally as well
+ # as for deployment later on
+ # env:
+ # - name: OPENAI_BASE_URL
+ # value: "https://api.openai.com/v1"
+ # - name: ACCOUNT_ID
+ # value: "your_account_id_here"
+
+
+# Deployment Configuration
+# -----------------------
+# Configuration for deploying your agent to Kubernetes clusters
+deployment:
+ # Container image configuration
+ image:
+ repository: "" # Update with your container registry
+ tag: "latest" # Default tag, should be versioned in production
+
+ # Global deployment settings that apply to all clusters
+ # These can be overridden in cluster-specific files (deploy/*.yaml)
+ global:
+ agent:
+ name: "ab000-hello-acp"
+ description: "An AgentEx agent that is not intelligent. It just shows how to implement the base agentic ACP type."
+
+ # Default replica count
+ replicaCount: 1
+
+ # Default resource requirements
+ resources:
+ requests:
+ cpu: "500m"
+ memory: "1Gi"
+ limits:
+ cpu: "1000m"
+ memory: "2Gi"
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/00_base/000_hello_acp/project/__init__.py b/examples/tutorials/10_agentic/00_base/000_hello_acp/project/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/examples/tutorials/10_agentic/00_base/000_hello_acp/project/acp.py b/examples/tutorials/10_agentic/00_base/000_hello_acp/project/acp.py
new file mode 100644
index 000000000..069dc0b35
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/000_hello_acp/project/acp.py
@@ -0,0 +1,75 @@
+import json
+from agentex.lib import adk
+from agentex.lib.sdk.fastacp.fastacp import FastACP
+from agentex.lib.types.fastacp import AgenticACPConfig
+from agentex.lib.types.acp import CancelTaskParams, CreateTaskParams, SendEventParams
+
+from agentex.types.text_content import TextContent
+from agentex.lib.utils.logging import make_logger
+
+logger = make_logger(__name__)
+
+
+# Create an ACP server with base configuration
+# This sets up the core server that will handle task creation, events, and cancellation
+acp = FastACP.create(
+ acp_type="agentic",
+ config=AgenticACPConfig(
+ type="base",
+ ),
+)
+
+@acp.on_task_create
+async def handle_task_create(params: CreateTaskParams):
+ # This handler is called first whenever a new task is created.
+ # It's a good place to initialize any state or resources needed for the task.
+
+ #########################################################
+ # 1. (👋) Do task initialization here.
+ #########################################################
+
+ # Acknowledge that the task has been created.
+ await adk.messages.create(
+ task_id=params.task.id,
+ content=TextContent(
+ author="agent",
+ content=f"Hello! I've received your task. Normally you can do some state initialization here, or just pass and do nothing until you get your first event. For now I'm just acknowledging that I've received a task with the following params:\n\n{json.dumps(params.params, indent=2)}.\n\nYou should only see this message once, when the task is created. All subsequent events will be handled by the `on_task_event_send` handler.",
+ ),
+ )
+
+@acp.on_task_event_send
+async def handle_event_send(params: SendEventParams):
+ # This handler is called whenever a new event (like a message) is sent to the task
+
+ #########################################################
+ # 2. (👋) Echo back the client's message to show it in the UI.
+ #########################################################
+
+ # This is not done by default so the agent developer has full control over what is shown to the user.
+ if params.event.content:
+ await adk.messages.create(task_id=params.task.id, content=params.event.content)
+
+ #########################################################
+ # 3. (👋) Send a simple response message.
+ #########################################################
+
+ # In future tutorials, this is where we'll add more sophisticated response logic.
+ await adk.messages.create(
+ task_id=params.task.id,
+ content=TextContent(
+ author="agent",
+ content=f"Hello! I've received your message. I can't respond right now, but in future tutorials we'll see how you can get me to intelligently respond to your message.",
+ ),
+ )
+
+@acp.on_task_cancel
+async def handle_task_cancel(params: CancelTaskParams):
+ # This handler is called when a task is cancelled.
+ # It's useful for cleaning up any resources or state associated with the task.
+
+ #########################################################
+ # 4. (👋) Do task cleanup here.
+ #########################################################
+
+ # This is mostly for durable workflows that are cancellable like Temporal, but we will leave it here for demonstration purposes.
+ logger.info(f"Hello! I've received task cancel for task {params.task.id}: {params.task}. This isn't necessary for this example, but it's good to know that it's available.")
diff --git a/examples/tutorials/10_agentic/00_base/000_hello_acp/requirements.txt b/examples/tutorials/10_agentic/00_base/000_hello_acp/requirements.txt
new file mode 100644
index 000000000..d12ad3951
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/000_hello_acp/requirements.txt
@@ -0,0 +1,4 @@
+agentex-py>=0.0.4,<0.1.0
+
+# Scale GenAI Platform Python SDK
+scale-gp
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/00_base/010_multiturn/.dockerignore b/examples/tutorials/10_agentic/00_base/010_multiturn/.dockerignore
new file mode 100644
index 000000000..4521d8465
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/010_multiturn/.dockerignore
@@ -0,0 +1,46 @@
+# Python
+__pycache__/
+*.py[cod]
+*$py.class
+*.so
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# Environments
+.env**
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# IDE
+.idea/
+.vscode/
+*.swp
+*.swo
+
+# Git
+.git
+.gitignore
+
+# Misc
+.DS_Store
+
+# CodeArtifact configuration
+.codeartifact-pip-conf
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/00_base/010_multiturn/Dockerfile b/examples/tutorials/10_agentic/00_base/010_multiturn/Dockerfile
new file mode 100644
index 000000000..29f524911
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/010_multiturn/Dockerfile
@@ -0,0 +1,45 @@
+# syntax=docker/dockerfile:1.3
+FROM python:3.12-slim
+COPY --from=ghcr.io/astral-sh/uv:0.6.4 /uv /uvx /bin/
+
+# Install system dependencies
+RUN apt-get update && apt-get install -y \
+ htop \
+ vim \
+ curl \
+ tar \
+ python3-dev \
+ postgresql-client \
+ build-essential \
+ libpq-dev \
+ gcc \
+ cmake \
+ netcat-openbsd \
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
+
+RUN uv pip install --system --upgrade pip setuptools wheel
+
+ENV UV_HTTP_TIMEOUT=1000
+
+# Copy just the requirements file to optimize caching
+COPY 010_multiturn/requirements.txt /app/requirements.txt
+
+WORKDIR /app/
+
+# Install the required Python packages
+RUN --mount=type=secret,id=codeartifact-pip-conf,target=/etc/pip.conf \
+ export UV_INDEX_URL=$(grep -E "^index-url" /etc/pip.conf | cut -d'=' -f2- | xargs) && \
+ export UV_EXTRA_INDEX_URL=$(grep -E "^extra-index-url" /etc/pip.conf | cut -d'=' -f2- | xargs || echo "") && \
+ uv pip install --system -r requirements.txt
+
+# Copy the project code
+COPY 010_multiturn/project /app/project
+
+WORKDIR /app/project
+
+# Set environment variables
+ENV PYTHONPATH=/app
+
+# Run the agent using uvicorn
+CMD ["uvicorn", "acp:acp", "--host", "0.0.0.0", "--port", "8000"]
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/00_base/010_multiturn/README.md b/examples/tutorials/10_agentic/00_base/010_multiturn/README.md
new file mode 100644
index 000000000..30e18800c
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/010_multiturn/README.md
@@ -0,0 +1,21 @@
+# [Agentic] (Base) Multiturn
+
+This is a simple AgentEx agent that keeps multi-turn conversation history in the task state and calls an LLM to respond to each user message, to show how the base agentic ACP type supports stateful, multi-turn conversations.
+
+## Building the Agent
+
+To build the agent Docker image locally:
+
+1. First, set up CodeArtifact authentication:
+```bash
+../../../setup-build-codeartifact.sh
+```
+
+2. Build the agent image:
+```bash
+agentex agents build --manifest manifest.yaml --secret 'id=codeartifact-pip-conf,src=.codeartifact-pip-conf'
+```
+
+## Official Documentation
+
+[010 Multiturn](https://agentex.scale.com/docs/tutorials/agentic/010_multiturn)
diff --git a/examples/tutorials/10_agentic/00_base/010_multiturn/manifest.yaml b/examples/tutorials/10_agentic/00_base/010_multiturn/manifest.yaml
new file mode 100644
index 000000000..4ad60c950
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/010_multiturn/manifest.yaml
@@ -0,0 +1,122 @@
+# Agent Manifest Configuration
+# ---------------------------
+# This file defines how your agent should be built and deployed.
+
+# Build Configuration
+# ------------------
+# The build config defines what gets packaged into your agent's Docker image.
+# This same configuration is used whether building locally or remotely.
+#
+# When building:
+# 1. All files from include_paths are collected into a build context
+# 2. The context is filtered by dockerignore rules
+# 3. The Dockerfile uses this context to build your agent's image
+# 4. The image is pushed to a registry and used to run your agent
+build:
+ context:
+ # Root directory for the build context
+ root: ../ # Keep this as the default root
+
+ # Paths to include in the Docker build context
+ # Must include:
+ # - agentex-py: The core framework (needed for agent execution)
+ # - Your agent's directory (your custom agent code)
+ # These paths are collected and sent to the Docker daemon for building
+ include_paths:
+ - 010_multiturn
+
+ # Path to your agent's Dockerfile
+ # This defines how your agent's image is built from the context
+ # Relative to the root directory
+ dockerfile: 010_multiturn/Dockerfile
+
+ # Path to your agent's .dockerignore
+ # Filters unnecessary files from the build context
+ # Helps keep build context small and builds fast
+ dockerignore: 010_multiturn/.dockerignore
+
+
+# Local Development Configuration
+# -----------------------------
+# Only used when running the agent locally
+local_development:
+ agent:
+ port: 8000 # Port where your local ACP server is running
+ host_address: host.docker.internal # Host address for Docker networking (host.docker.internal for Docker, localhost for direct)
+
+ # File paths for local development (relative to this manifest.yaml)
+ paths:
+ # Path to ACP server file
+ # Examples:
+ # project/acp.py (standard)
+ # src/server.py (custom structure)
+ # ../shared/acp.py (shared across projects)
+ # /absolute/path/acp.py (absolute path)
+ acp: project/acp.py
+
+
+# Agent Configuration
+# -----------------
+agent:
+ # Unique name for your agent
+ # Used for task routing and monitoring
+ name: ab010-multiturn
+
+ # Type of ACP to use
+ # sync: Simple synchronous ACP implementation
+ # agentic: Advanced ACP with sub-types "base" or "temporal" (requires config)
+ acp_type: agentic
+
+ # Description of what your agent does
+ # Helps with documentation and discovery
+  description: A multiturn AgentEx agent that responds to user messages with an LLM
+
+ # Temporal workflow configuration
+ # Set enabled: true to use Temporal workflows for long-running tasks
+ temporal:
+ enabled: false
+
+ # Optional: Credentials mapping
+ # Maps Kubernetes secrets to environment variables
+ # Common credentials include:
+ # credentials:
+ # - env_var_name: OPENAI_API_KEY
+ # secret_name: openai-api-key
+ # secret_key: api-key
+
+ # Optional: Set Environment variables for running your agent locally as well
+ # as for deployment later on
+ # env:
+ # - name: OPENAI_BASE_URL
+ # value: "https://api.openai.com/v1"
+ # - name: ACCOUNT_ID
+ # value: "your_account_id_here"
+
+
+# Deployment Configuration
+# -----------------------
+# Configuration for deploying your agent to Kubernetes clusters
+deployment:
+ # Container image configuration
+ image:
+ repository: "" # Update with your container registry
+ tag: "latest" # Default tag, should be versioned in production
+
+ # Global deployment settings that apply to all clusters
+ # These can be overridden in cluster-specific files (deploy/*.yaml)
+ global:
+ agent:
+ name: "ab010-multiturn"
+      description: "A multiturn AgentEx agent that responds to user messages with an LLM"
+
+ # Default replica count
+ replicaCount: 1
+
+ # Default resource requirements
+ resources:
+ requests:
+ cpu: "500m"
+ memory: "1Gi"
+ limits:
+ cpu: "1000m"
+ memory: "2Gi"
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/00_base/010_multiturn/project/__init__.py b/examples/tutorials/10_agentic/00_base/010_multiturn/project/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/examples/tutorials/10_agentic/00_base/010_multiturn/project/acp.py b/examples/tutorials/10_agentic/00_base/010_multiturn/project/acp.py
new file mode 100644
index 000000000..e6d6f8cc9
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/010_multiturn/project/acp.py
@@ -0,0 +1,153 @@
+import os
+from typing import List
+
+from agentex.lib import adk
+from agentex.lib.core.tracing.tracing_processor_manager import (
+ add_tracing_processor_config,
+)
+from agentex.lib.sdk.fastacp.fastacp import FastACP
+from agentex.lib.types.acp import CancelTaskParams, CreateTaskParams, SendEventParams
+from agentex.lib.types.fastacp import AgenticACPConfig
+from agentex.lib.types.llm_messages import (
+ AssistantMessage,
+ LLMConfig,
+ Message,
+ SystemMessage,
+ UserMessage,
+)
+from agentex.lib.types.tracing import SGPTracingProcessorConfig
+from agentex.lib.utils.logging import make_logger
+from agentex.lib.utils.model_utils import BaseModel
+from agentex.types.text_content import TextContent
+
+logger = make_logger(__name__)
+
+# Add a tracing processor
+add_tracing_processor_config(SGPTracingProcessorConfig(
+ sgp_api_key=os.environ.get("SCALE_GP_API_KEY", ""),
+ sgp_account_id=os.environ.get("SCALE_GP_ACCOUNT_ID", "")
+))
+
+# Create an ACP server
+
+# !!! Warning: Because "Agentic" ACPs are designed to be fully asynchronous, race conditions can occur if parallel events are sent. It is highly recommended to use the "temporal" type in the AgenticACPConfig instead to handle complex use cases. The "base" ACP is only designed to be used for simple use cases and for learning purposes.
+acp = FastACP.create(
+ acp_type="agentic",
+ config=AgenticACPConfig(type="base"),
+)
+
+class StateModel(BaseModel):
+ messages: List[Message]
+
+
+@acp.on_task_create
+async def handle_task_create(params: CreateTaskParams):
+ # Upon task creation, we initialize the task state with a system message.
+ # This will be fetched by the `on_task_event_send` handler when each event is sent.
+
+ #########################################################
+ # 1. Initialize the task state.
+ #########################################################
+
+ state = StateModel(messages=[SystemMessage(content="You are a helpful assistant that can answer questions.")])
+ await adk.state.create(task_id=params.task.id, agent_id=params.agent.id, state=state)
+
+@acp.on_task_event_send
+async def handle_event_send(params: SendEventParams):
+ # !!! Warning: Because "Agentic" ACPs are designed to be fully asynchronous, race conditions can occur if parallel events are sent. It is highly recommended to use the "temporal" type in the AgenticACPConfig instead to handle complex use cases. The "base" ACP is only designed to be used for simple use cases and for learning purposes.
+
+ #########################################################
+ # 2. Validate the event content.
+ #########################################################
+ if not params.event.content:
+ return
+
+ if params.event.content.type != "text":
+ raise ValueError(f"Expected text message, got {params.event.content.type}")
+
+ if params.event.content.author != "user":
+ raise ValueError(f"Expected user message, got {params.event.content.author}")
+
+ #########################################################
+ # 3. Echo back the user's message so it shows up in the UI.
+ #########################################################
+
+ await adk.messages.create(
+ task_id=params.task.id,
+ trace_id=params.task.id,
+ content=params.event.content,
+ )
+
+ #########################################################
+ # 4. (👋) If the OpenAI API key is not set, send a message to the user to let them know.
+ #########################################################
+
+ if not os.environ.get("OPENAI_API_KEY"):
+ await adk.messages.create(
+ task_id=params.task.id,
+ trace_id=params.task.id,
+ content=TextContent(
+ author="agent",
+ content="Hey, sorry I'm unable to respond to your message because you're running this example without an OpenAI API key. Please set the OPENAI_API_KEY environment variable to run this example. Do this by either by adding a .env file to the project/ directory or by setting the environment variable in your terminal.",
+ ),
+ )
+
+ #########################################################
+ # 5. (👋) Retrieve the task state.
+ #########################################################
+
+ task_state = await adk.state.get_by_task_and_agent(task_id=params.task.id, agent_id=params.agent.id)
+ state = StateModel.model_validate(task_state.state)
+
+ #########################################################
+ # 6. (👋) Add the new user message to the message history
+ #########################################################
+
+ state.messages.append(UserMessage(content=params.event.content.content))
+
+ #########################################################
+ # 7. (👋) Call an LLM to respond to the user's message
+ #########################################################
+
+ # Call an LLM to respond to the user's message
+ chat_completion = await adk.providers.litellm.chat_completion(
+ llm_config=LLMConfig(model="gpt-4o-mini", messages=state.messages),
+ trace_id=params.task.id,
+ )
+ state.messages.append(AssistantMessage(content=chat_completion.choices[0].message.content))
+
+ #########################################################
+ # 8. (👋) Send agent response to client
+ #########################################################
+
+ if chat_completion.choices[0].message:
+ content_str = chat_completion.choices[0].message.content or ""
+ else:
+ content_str = ""
+
+ await adk.messages.create(
+ task_id=params.task.id,
+ trace_id=params.task.id,
+ content=TextContent(
+ author="agent",
+ content=content_str,
+ ),
+ )
+
+ #########################################################
+ # 9. (👋) Store the messages in the task state for the next turn
+ #########################################################
+
+ await adk.state.update(
+ state_id=task_state.id,
+ task_id=params.task.id,
+ agent_id=params.agent.id,
+ state=state,
+ trace_id=params.task.id,
+ )
+
+@acp.on_task_cancel
+async def handle_task_cancel(params: CancelTaskParams):
+ """Default task cancel handler"""
+ logger.info(f"Task canceled: {params.task}")
+
diff --git a/examples/tutorials/10_agentic/00_base/010_multiturn/requirements.txt b/examples/tutorials/10_agentic/00_base/010_multiturn/requirements.txt
new file mode 100644
index 000000000..d12ad3951
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/010_multiturn/requirements.txt
@@ -0,0 +1,4 @@
+agentex-py>=0.0.4,<0.1.0
+
+# Scale GenAI Platform Python SDK
+scale-gp
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/00_base/020_streaming/.dockerignore b/examples/tutorials/10_agentic/00_base/020_streaming/.dockerignore
new file mode 100644
index 000000000..4521d8465
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/020_streaming/.dockerignore
@@ -0,0 +1,46 @@
+# Python
+__pycache__/
+*.py[cod]
+*$py.class
+*.so
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# Environments
+.env**
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# IDE
+.idea/
+.vscode/
+*.swp
+*.swo
+
+# Git
+.git
+.gitignore
+
+# Misc
+.DS_Store
+
+# CodeArtifact configuration
+.codeartifact-pip-conf
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/00_base/020_streaming/Dockerfile b/examples/tutorials/10_agentic/00_base/020_streaming/Dockerfile
new file mode 100644
index 000000000..26ceaf8aa
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/020_streaming/Dockerfile
@@ -0,0 +1,45 @@
+# syntax=docker/dockerfile:1.3
+FROM python:3.12-slim
+COPY --from=ghcr.io/astral-sh/uv:0.6.4 /uv /uvx /bin/
+
+# Install system dependencies
+RUN apt-get update && apt-get install -y \
+ htop \
+ vim \
+ curl \
+ tar \
+ python3-dev \
+ postgresql-client \
+ build-essential \
+ libpq-dev \
+ gcc \
+ cmake \
+ netcat-openbsd \
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
+
+RUN uv pip install --system --upgrade pip setuptools wheel
+
+ENV UV_HTTP_TIMEOUT=1000
+
+# Copy just the requirements file to optimize caching
+COPY 020_streaming/requirements.txt /app/requirements.txt
+
+WORKDIR /app/
+
+# Install the required Python packages
+RUN --mount=type=secret,id=codeartifact-pip-conf,target=/etc/pip.conf \
+ export UV_INDEX_URL=$(grep -E "^index-url" /etc/pip.conf | cut -d'=' -f2- | xargs) && \
+ export UV_EXTRA_INDEX_URL=$(grep -E "^extra-index-url" /etc/pip.conf | cut -d'=' -f2- | xargs || echo "") && \
+ uv pip install --system -r requirements.txt
+
+# Copy the project code
+COPY 020_streaming/project /app/project
+
+WORKDIR /app/project
+
+# Set environment variables
+ENV PYTHONPATH=/app
+
+# Run the agent using uvicorn
+CMD ["uvicorn", "acp:acp", "--host", "0.0.0.0", "--port", "8000"]
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/00_base/020_streaming/README.md b/examples/tutorials/10_agentic/00_base/020_streaming/README.md
new file mode 100644
index 000000000..5ca587b3b
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/020_streaming/README.md
@@ -0,0 +1,127 @@
+# ab020-streaming - AgentEx Starter Template
+
+This is a generic starter template for building agents with the AgentEx framework. It provides a basic implementation of the Agent 2 Client Protocol (ACP) to help you get started quickly.
+
+## What You'll Learn
+
+- **Tasks**: A task is a grouping mechanism for related messages. Think of it as a conversation thread or a session.
+- **Messages**: Messages are communication objects within a task. They can contain text, data, or instructions.
+- **ACP Events**: The agent responds to three main events:
+  - `task_create`: When a new task is created
+  - `event_send`: When an event (such as a user message)
+    is sent within a task
+  - `task_cancel`: When a task is canceled
+
+
+## Running the Agent
+
+1. Run the agent locally:
+```bash
+agentex agents run --manifest manifest.yaml
+```
+
+The agent will start on port 8000 and print messages whenever it receives any of the ACP events.
+
+## What's Inside
+
+This template:
+- Sets up a basic ACP server
+- Handles each of the required ACP events with simple print statements
+- Provides a foundation for building more complex agents
+
+## Next Steps
+
+For more advanced agent development, check out the AgentEx tutorials:
+
+- **Tutorials 00-08**: Learn about building synchronous agents with ACP
+- **Tutorials 09-10**: Learn how to use Temporal to power asynchronous agents
+ - Tutorial 09: Basic Temporal workflow setup
+ - Tutorial 10: Advanced Temporal patterns and best practices
+
+These tutorials will help you understand:
+- How to handle long-running tasks
+- Implementing state machines
+- Managing complex workflows
+- Best practices for async agent development
+
+## The Manifest File
+
+The `manifest.yaml` file is your agent's configuration file. It defines:
+- How your agent should be built and packaged
+- What files are included in your agent's Docker image
+- Your agent's name and description
+- Local development settings (like the port your agent runs on)
+
+This file is essential for both local development and deployment of your agent.
+
+## Project Structure
+
+```
+020_streaming/
+├── project/ # Your agent's code
+│ ├── __init__.py
+│ └── acp.py # ACP server and event handlers
+├── Dockerfile # Container definition
+├── manifest.yaml # Deployment config
+└── requirements.txt # Dependencies
+```
+
+## Development
+
+1. **Customize Event Handlers**
+ - Modify the handlers in `acp.py` to implement your agent's logic
+ - Add your own tools and capabilities
+ - Implement custom state management
+
+2. **Add Dependencies**
+ - Add required packages to `requirements.txt`
+ - Update the manifest with any needed credentials
+
+## Local Development
+
+1. **Install AgentEx**
+```bash
+cd agentex-py
+uv venv
+source .venv/bin/activate
+uv sync
+```
+
+2. **Deploy your agent**
+```bash
+# From this directory
+export ENVIRONMENT=development && agentex agents create --manifest manifest.yaml
+```
+
+3. **Interact with your agent**
+
+Option 0: CLI (deprecated - to be replaced once a new CLI is implemented - please use the web UI for now!)
+```bash
+# Submit a task via CLI
+agentex tasks submit --agent {{ agent_name }} --task "Your task here"
+```
+
+Option 1: Web UI
+```bash
+# Start the local web interface
+cd agentex-web
+make dev
+
+# Then open http://localhost:3000 in your browser to chat with your agent
+```
+## Development Tips
+
+1. **Local Testing**
+- Set environment variables in project/.env for any required credentials. OR you can set these in the manifest.yaml
+
+### To build the agent Docker image locally (normally not necessary):
+
+1. First, set up CodeArtifact authentication:
+```bash
+../../setup-build-codeartifact.sh
+```
+
+2. Build the agent image:
+```bash
+agentex agents build --manifest manifest.yaml --secret 'id=codeartifact-pip-conf,src=.codeartifact-pip-conf'
+```
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/00_base/020_streaming/manifest.yaml b/examples/tutorials/10_agentic/00_base/020_streaming/manifest.yaml
new file mode 100644
index 000000000..3280ce483
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/020_streaming/manifest.yaml
@@ -0,0 +1,119 @@
+# Agent Manifest Configuration
+# ---------------------------
+# This file defines how your agent should be built and deployed.
+
+# Build Configuration
+# ------------------
+# The build config defines what gets packaged into your agent's Docker image.
+# This same configuration is used whether building locally or remotely.
+#
+# When building:
+# 1. All files from include_paths are collected into a build context
+# 2. The context is filtered by dockerignore rules
+# 3. The Dockerfile uses this context to build your agent's image
+# 4. The image is pushed to a registry and used to run your agent
+build:
+ context:
+ # Root directory for the build context
+ root: ../ # Keep this as the default root
+
+ # Paths to include in the Docker build context
+ # Must include:
+ # - agentex-py: The core framework (needed for agent execution)
+ # - Your agent's directory (your custom agent code)
+ # These paths are collected and sent to the Docker daemon for building
+ include_paths:
+ - 020_streaming
+
+ # Path to your agent's Dockerfile
+ # This defines how your agent's image is built from the context
+ # Relative to the root directory
+ dockerfile: 020_streaming/Dockerfile
+
+ # Path to your agent's .dockerignore
+ # Filters unnecessary files from the build context
+ # Helps keep build context small and builds fast
+ dockerignore: 020_streaming/.dockerignore
+
+
+# Local Development Configuration
+# -----------------------------
+# Only used when running the agent locally
+local_development:
+ agent:
+ port: 8000 # Port where your local ACP server is running
+ host_address: host.docker.internal # Host address for Docker networking (host.docker.internal for Docker, localhost for direct)
+
+ # File paths for local development (relative to this manifest.yaml)
+ paths:
+ # Path to ACP server file
+ # Examples:
+ # project/acp.py (standard)
+ # src/server.py (custom structure)
+ # ../shared/acp.py (shared across projects)
+ # /absolute/path/acp.py (absolute path)
+ acp: project/acp.py
+
+
+# Agent Configuration
+# -----------------
+agent:
+ acp_type: agentic
+
+ # Unique name for your agent
+ # Used for task routing and monitoring
+ name: ab020-streaming
+
+ # Description of what your agent does
+ # Helps with documentation and discovery
+ description: A multiturn AgentEx agent that streams outputs
+
+ # Temporal workflow configuration
+ # Set enabled: true to use Temporal workflows for long-running tasks
+ temporal:
+ enabled: false
+
+ # Optional: Credentials mapping
+ # Maps Kubernetes secrets to environment variables
+ # Common credentials include:
+ # credentials:
+ # - env_var_name: OPENAI_API_KEY
+ # secret_name: openai-api-key
+ # secret_key: api-key
+
+ # Optional: Set Environment variables for running your agent locally as well
+ # as for deployment later on
+ # env:
+ # - name: OPENAI_BASE_URL
+ # value: "https://api.openai.com/v1"
+ # - name: ACCOUNT_ID
+ # value: "your_account_id_here"
+
+
+# Deployment Configuration
+# -----------------------
+# Configuration for deploying your agent to Kubernetes clusters
+deployment:
+ # Container image configuration
+ image:
+ repository: "" # Update with your container registry
+ tag: "latest" # Default tag, should be versioned in production
+
+ # Global deployment settings that apply to all clusters
+ # These can be overridden in cluster-specific files (deploy/*.yaml)
+ global:
+ agent:
+ name: "ab020-streaming"
+ description: "A multiturn AgentEx agent that streams outputs"
+
+ # Default replica count
+ replicaCount: 1
+
+ # Default resource requirements
+ resources:
+ requests:
+ cpu: "500m"
+ memory: "1Gi"
+ limits:
+ cpu: "1000m"
+ memory: "2Gi"
diff --git a/examples/tutorials/10_agentic/00_base/020_streaming/project/__init__.py b/examples/tutorials/10_agentic/00_base/020_streaming/project/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/examples/tutorials/10_agentic/00_base/020_streaming/project/acp.py b/examples/tutorials/10_agentic/00_base/020_streaming/project/acp.py
new file mode 100644
index 000000000..4e6c698b3
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/020_streaming/project/acp.py
@@ -0,0 +1,130 @@
+import os
+from typing import List
+
+from agentex.lib import adk
+from agentex.lib.sdk.fastacp.fastacp import FastACP
+from agentex.lib.types.acp import CancelTaskParams, CreateTaskParams, SendEventParams
+from agentex.lib.types.fastacp import AgenticACPConfig
+from agentex.lib.types.llm_messages import AssistantMessage, LLMConfig, Message, SystemMessage, UserMessage
+from agentex.lib.utils.logging import make_logger
+from agentex.lib.utils.model_utils import BaseModel
+from agentex.types.text_content import TextContent
+
+logger = make_logger(__name__)
+
+
+# Create an ACP server
+
+# !!! Warning: Because "Agentic" ACPs are designed to be fully asynchronous, race conditions can occur if parallel events are sent. It is highly recommended to use the "temporal" type in the AgenticACPConfig instead to handle complex use cases. The "base" ACP is only designed to be used for simple use cases and for learning purposes.
+acp = FastACP.create(
+ acp_type="agentic",
+ config=AgenticACPConfig(type="base"),
+)
+
+class StateModel(BaseModel):
+ messages: List[Message]
+
+
+@acp.on_task_create
+async def handle_task_create(params: CreateTaskParams):
+ # Upon task creation, we initialize the task state with a system message.
+ # This will be fetched by the `on_task_event_send` handler when each event is sent.
+
+ #########################################################
+ # 1. Initialize the task state.
+ #########################################################
+
+ state = StateModel(messages=[SystemMessage(content="You are a helpful assistant that can answer questions.")])
+ await adk.state.create(task_id=params.task.id, agent_id=params.agent.id, state=state)
+
+@acp.on_task_event_send
+async def handle_event_send(params: SendEventParams):
+ # !!! Warning: Because "Agentic" ACPs are designed to be fully asynchronous, race conditions can occur if parallel events are sent. It is highly recommended to use the "temporal" type in the AgenticACPConfig instead to handle complex use cases. The "base" ACP is only designed to be used for simple use cases and for learning purposes.
+
+ #########################################################
+ # 2. Validate the event content.
+ #########################################################
+ if not params.event.content:
+ return
+
+ if params.event.content.type != "text":
+ raise ValueError(f"Expected text message, got {params.event.content.type}")
+
+ if params.event.content.author != "user":
+ raise ValueError(f"Expected user message, got {params.event.content.author}")
+
+ #########################################################
+ # 3. Echo back the user's message.
+ #########################################################
+
+ await adk.messages.create(
+ task_id=params.task.id,
+ trace_id=params.task.id,
+ content=params.event.content,
+ )
+
+ #########################################################
+ # 4. If the OpenAI API key is not set, send a message to the user to let them know.
+ #########################################################
+
+ if not os.environ.get("OPENAI_API_KEY"):
+ await adk.messages.create(
+ task_id=params.task.id,
+ trace_id=params.task.id,
+ content=TextContent(
+ author="agent",
+ content="Hey, sorry I'm unable to respond to your message because you're running this example without an OpenAI API key. Please set the OPENAI_API_KEY environment variable to run this example. Do this by either by adding a .env file to the project/ directory or by setting the environment variable in your terminal.",
+ ),
+ )
+
+ #########################################################
+ # 5. Retrieve the task state.
+ #########################################################
+
+ task_state = await adk.state.get_by_task_and_agent(task_id=params.task.id, agent_id=params.agent.id)
+ state = StateModel.model_validate(task_state.state)
+
+ #########################################################
+ # 6. Add the new user message to the message history
+ #########################################################
+
+ state.messages.append(UserMessage(content=params.event.content.content))
+
+ #########################################################
+ # 7. (👋) Call an LLM to respond to the user's message
+ #########################################################
+
+ # When we use the streaming version of chat completion, we can either use the `chat_completion_stream_auto_send` method, or we can use the `chat_completion_stream` method. Here is the difference:
+
+ # `chat_completion_stream_auto_send` - This is the "managed version" of the streaming method. It will automatically send the response to the client as an agent TaskMessage.
+
+ # `chat_completion_stream` - This is the "unmanaged version" of the streaming method. It will return a generator of chat completion chunks. You can then do whatever you want with the chunks, such as sending them to the client as an agent message, or storing them in the task state, or whatever you want.
+
+ # Here we use the `chat_completion_stream_auto_send` method.
+ #########################################################
+
+ task_message = await adk.providers.litellm.chat_completion_stream_auto_send(
+ task_id=params.task.id,
+ llm_config=LLMConfig(model="gpt-4o-mini", messages=state.messages, stream=True),
+ trace_id=params.task.id,
+ )
+
+ state.messages.append(AssistantMessage(content=task_message.content.content))
+
+ #########################################################
+ # 8. Store the messages in the task state for the next turn
+ #########################################################
+
+ await adk.state.update(
+ state_id=task_state.id,
+ task_id=params.task.id,
+ agent_id=params.agent.id,
+ state=state,
+ trace_id=params.task.id,
+ )
+
+@acp.on_task_cancel
+async def handle_task_cancel(params: CancelTaskParams):
+ """Default task cancel handler"""
+ logger.info(f"Task canceled: {params.task}")
+
diff --git a/examples/tutorials/10_agentic/00_base/020_streaming/requirements.txt b/examples/tutorials/10_agentic/00_base/020_streaming/requirements.txt
new file mode 100644
index 000000000..2c2ebf15b
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/020_streaming/requirements.txt
@@ -0,0 +1,5 @@
+# AgentEx Python SDK
+agentex-py>=0.0.4,<0.1.0
+
+# Scale GenAI Platform Python SDK
+scale-gp
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/00_base/030_tracing/.dockerignore b/examples/tutorials/10_agentic/00_base/030_tracing/.dockerignore
new file mode 100644
index 000000000..4521d8465
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/030_tracing/.dockerignore
@@ -0,0 +1,46 @@
+# Python
+__pycache__/
+*.py[cod]
+*$py.class
+*.so
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# Environments
+.env**
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# IDE
+.idea/
+.vscode/
+*.swp
+*.swo
+
+# Git
+.git
+.gitignore
+
+# Misc
+.DS_Store
+
+# CodeArtifact configuration
+.codeartifact-pip-conf
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/00_base/030_tracing/Dockerfile b/examples/tutorials/10_agentic/00_base/030_tracing/Dockerfile
new file mode 100644
index 000000000..84ecb735d
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/030_tracing/Dockerfile
@@ -0,0 +1,45 @@
+# syntax=docker/dockerfile:1.3
+FROM python:3.12-slim
+COPY --from=ghcr.io/astral-sh/uv:0.6.4 /uv /uvx /bin/
+
+# Install system dependencies
+RUN apt-get update && apt-get install -y \
+ htop \
+ vim \
+ curl \
+ tar \
+ python3-dev \
+ postgresql-client \
+ build-essential \
+ libpq-dev \
+ gcc \
+ cmake \
+ netcat-openbsd \
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
+
+RUN uv pip install --system --upgrade pip setuptools wheel
+
+ENV UV_HTTP_TIMEOUT=1000
+
+# Copy just the requirements file to optimize caching
+COPY 030_tracing/requirements.txt /app/requirements.txt
+
+WORKDIR /app/
+
+# Install the required Python packages
+RUN --mount=type=secret,id=codeartifact-pip-conf,target=/etc/pip.conf \
+ export UV_INDEX_URL=$(grep -E "^index-url" /etc/pip.conf | cut -d'=' -f2- | xargs) && \
+ export UV_EXTRA_INDEX_URL=$(grep -E "^extra-index-url" /etc/pip.conf | cut -d'=' -f2- | xargs || echo "") && \
+ uv pip install --system -r requirements.txt
+
+# Copy the project code
+COPY 030_tracing/project /app/project
+
+WORKDIR /app/project
+
+# Set environment variables
+ENV PYTHONPATH=/app
+
+# Run the agent using uvicorn
+CMD ["uvicorn", "acp:acp", "--host", "0.0.0.0", "--port", "8000"]
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/00_base/030_tracing/README.md b/examples/tutorials/10_agentic/00_base/030_tracing/README.md
new file mode 100644
index 000000000..936e2ef44
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/030_tracing/README.md
@@ -0,0 +1,141 @@
+# ab030-tracing - AgentEx Starter Template
+
+This is a generic starter template for building agents with the AgentEx framework. It provides a basic implementation of the Agent 2 Client Protocol (ACP) to help you get started quickly.
+
+## What You'll Learn
+
+- **Tasks**: A task is a grouping mechanism for related messages. Think of it as a conversation thread or a session.
+- **Messages**: Messages are communication objects within a task. They can contain text, data, or instructions.
+- **ACP Events**: The agent responds to four main events:
+ - `task_received`: When a new task is created
+ - `task_message_received`: When a message is sent within a task
+ - `task_approved`: When a task is approved
+ - `task_canceled`: When a task is canceled
+
+## Building the Agent
+
+To build the agent Docker image locally:
+
+1. First, set up CodeArtifact authentication:
+```bash
+../../../setup-build-codeartifact.sh
+```
+
+2. Build the agent image:
+```bash
+agentex agents build --manifest manifest.yaml --secret 'id=codeartifact-pip-conf,src=.codeartifact-pip-conf'
+```
+
+## Running the Agent
+
+1. Run the agent locally:
+```bash
+agentex agents run --manifest manifest.yaml
+```
+
+The agent will start on port 8000 and print messages whenever it receives any of the ACP events.
+
+## What's Inside
+
+This template:
+- Sets up a basic ACP server
+- Handles each of the required ACP events with simple print statements
+- Provides a foundation for building more complex agents
+
+## Next Steps
+
+For more advanced agent development, check out the AgentEx tutorials:
+
+- **Tutorials 00-08**: Learn about building synchronous agents with ACP
+- **Tutorials 09-10**: Learn how to use Temporal to power asynchronous agents
+ - Tutorial 09: Basic Temporal workflow setup
+ - Tutorial 10: Advanced Temporal patterns and best practices
+
+These tutorials will help you understand:
+- How to handle long-running tasks
+- Implementing state machines
+- Managing complex workflows
+- Best practices for async agent development
+
+## The Manifest File
+
+The `manifest.yaml` file is your agent's configuration file. It defines:
+- How your agent should be built and packaged
+- What files are included in your agent's Docker image
+- Your agent's name and description
+- Local development settings (like the port your agent runs on)
+
+This file is essential for both local development and deployment of your agent.
+
+## Project Structure
+
+```
+030_tracing/
+├── project/ # Your agent's code
+│ ├── __init__.py
+│ └── acp.py # ACP server and event handlers
+├── Dockerfile # Container definition
+├── manifest.yaml # Deployment config
+└── requirements.txt # Dependencies
+```
+
+## Development
+
+1. **Customize Event Handlers**
+ - Modify the handlers in `acp.py` to implement your agent's logic
+ - Add your own tools and capabilities
+ - Implement custom state management
+
+2. **Add Dependencies**
+ - Add required packages to `requirements.txt`
+ - Update the manifest with any needed credentials
+
+## Local Development
+
+1. **Install AgentEx**
+```bash
+cd agentex-py
+uv venv
+source .venv/bin/activate
+uv sync
+```
+
+2. **Deploy your agent**
+```bash
+# From this directory
+export ENVIRONMENT=development && agentex agents create --manifest manifest.yaml
+```
+
+3. **Interact with your agent**
+
+Option 0: CLI (deprecated - to be replaced once a new CLI is implemented - please use the web UI for now!)
+```bash
+# Submit a task via CLI
+agentex tasks submit --agent {{ agent_name }} --task "Your task here"
+```
+
+Option 1: Web UI
+```bash
+# Start the local web interface
+cd agentex-web
+make dev
+
+# Then open http://localhost:3000 in your browser to chat with your agent
+```
+
+## Development Tips
+
+1. **Local Testing**
+- Set environment variables in project/.env for any required credentials. OR you can set these in the manifest.yaml
+
+### To build the agent Docker image locally (normally not necessary):
+
+1. First, set up CodeArtifact authentication:
+```bash
+../../setup-build-codeartifact.sh
+```
+
+2. Build the agent image:
+```bash
+agentex agents build --manifest manifest.yaml --secret 'id=codeartifact-pip-conf,src=.codeartifact-pip-conf'
+```
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/00_base/030_tracing/manifest.yaml b/examples/tutorials/10_agentic/00_base/030_tracing/manifest.yaml
new file mode 100644
index 000000000..73a4052a6
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/030_tracing/manifest.yaml
@@ -0,0 +1,119 @@
+# Agent Manifest Configuration
+# ---------------------------
+# This file defines how your agent should be built and deployed.
+
+# Build Configuration
+# ------------------
+# The build config defines what gets packaged into your agent's Docker image.
+# This same configuration is used whether building locally or remotely.
+#
+# When building:
+# 1. All files from include_paths are collected into a build context
+# 2. The context is filtered by dockerignore rules
+# 3. The Dockerfile uses this context to build your agent's image
+# 4. The image is pushed to a registry and used to run your agent
+build:
+ context:
+ # Root directory for the build context
+ root: ../ # Keep this as the default root
+
+ # Paths to include in the Docker build context
+ # Must include:
+ # - agentex-py: The core framework (needed for agent execution)
+ # - Your agent's directory (your custom agent code)
+ # These paths are collected and sent to the Docker daemon for building
+ include_paths:
+ - 030_tracing
+
+ # Path to your agent's Dockerfile
+ # This defines how your agent's image is built from the context
+ # Relative to the root directory
+ dockerfile: 030_tracing/Dockerfile
+
+ # Path to your agent's .dockerignore
+ # Filters unnecessary files from the build context
+ # Helps keep build context small and builds fast
+ dockerignore: 030_tracing/.dockerignore
+
+
+# Local Development Configuration
+# -----------------------------
+# Only used when running the agent locally
+local_development:
+ agent:
+ port: 8000 # Port where your local ACP server is running
+ host_address: host.docker.internal # Host address for Docker networking (host.docker.internal for Docker, localhost for direct)
+
+ # File paths for local development (relative to this manifest.yaml)
+ paths:
+ # Path to ACP server file
+ # Examples:
+ # project/acp.py (standard)
+ # src/server.py (custom structure)
+ # ../shared/acp.py (shared across projects)
+ # /absolute/path/acp.py (absolute path)
+ acp: project/acp.py
+
+
+# Agent Configuration
+# -----------------
+agent:
+ acp_type: agentic
+
+ # Unique name for your agent
+ # Used for task routing and monitoring
+ name: ab030-tracing
+
+ # Description of what your agent does
+ # Helps with documentation and discovery
+ description: An AgentEx agent that demonstrates how to do hierarchical and custom tracing
+
+ # Temporal workflow configuration
+ # Set enabled: true to use Temporal workflows for long-running tasks
+ temporal:
+ enabled: false
+
+ # Optional: Credentials mapping
+ # Maps Kubernetes secrets to environment variables
+ # Common credentials include:
+ # credentials:
+ # - env_var_name: OPENAI_API_KEY
+ # secret_name: openai-api-key
+ # secret_key: api-key
+
+ # Optional: Set Environment variables for running your agent locally as well
+ # as for deployment later on
+ # env:
+ # - name: OPENAI_BASE_URL
+ # value: "https://api.openai.com/v1"
+ # - name: ACCOUNT_ID
+ # value: "your_account_id_here"
+
+
+# Deployment Configuration
+# -----------------------
+# Configuration for deploying your agent to Kubernetes clusters
+deployment:
+ # Container image configuration
+ image:
+ repository: "" # Update with your container registry
+ tag: "latest" # Default tag, should be versioned in production
+
+ # Global deployment settings that apply to all clusters
+ # These can be overridden in cluster-specific files (deploy/*.yaml)
+ global:
+ agent:
+ name: "ab030-tracing"
+ description: "An AgentEx agent that demonstrates how to do hierarchical and custom tracing"
+
+ # Default replica count
+ replicaCount: 1
+
+ # Default resource requirements
+ resources:
+ requests:
+ cpu: "500m"
+ memory: "1Gi"
+ limits:
+ cpu: "1000m"
+ memory: "2Gi"
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/00_base/030_tracing/project/__init__.py b/examples/tutorials/10_agentic/00_base/030_tracing/project/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/examples/tutorials/10_agentic/00_base/030_tracing/project/acp.py b/examples/tutorials/10_agentic/00_base/030_tracing/project/acp.py
new file mode 100644
index 000000000..04e626cf4
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/030_tracing/project/acp.py
@@ -0,0 +1,152 @@
+import os
+from typing import List
+
+from agentex.lib import adk
+from agentex.lib.sdk.fastacp.fastacp import FastACP
+from agentex.lib.types.acp import CancelTaskParams, CreateTaskParams, SendEventParams
+from agentex.lib.types.fastacp import AgenticACPConfig
+from agentex.lib.types.llm_messages import AssistantMessage, LLMConfig, Message, SystemMessage, UserMessage
+from agentex.lib.utils.logging import make_logger
+from agentex.lib.utils.model_utils import BaseModel
+from agentex.types.text_content import TextContent
+
+logger = make_logger(__name__)
+
+
+# Create an ACP server
+
+# !!! Warning: Because "Agentic" ACPs are designed to be fully asynchronous, race conditions can occur if parallel events are sent. It is highly recommended to use the "temporal" type in the AgenticACPConfig instead to handle complex use cases. The "base" ACP is only designed to be used for simple use cases and for learning purposes.
+acp = FastACP.create(
+ acp_type="agentic",
+ config=AgenticACPConfig(type="base"),
+)
+
+class StateModel(BaseModel):
+ messages: List[Message]
+ turn_number: int
+
+
+@acp.on_task_create
+async def handle_task_create(params: CreateTaskParams):
+ # Upon task creation, we initialize the task state with a system message.
+ # This will be fetched by the `on_task_event_send` handler when each event is sent.
+
+ #########################################################
+ # 1. Initialize the task state.
+ #########################################################
+
+ state = StateModel(
+ messages=[SystemMessage(content="You are a helpful assistant that can answer questions.")],
+ turn_number=0,
+ )
+ await adk.state.create(task_id=params.task.id, agent_id=params.agent.id, state=state)
+
+@acp.on_task_event_send
+async def handle_event_send(params: SendEventParams):
+ # !!! Warning: Because "Agentic" ACPs are designed to be fully asynchronous, race conditions can occur if parallel events are sent. It is highly recommended to use the "temporal" type in the AgenticACPConfig instead to handle complex use cases. The "base" ACP is only designed to be used for simple use cases and for learning purposes.
+
+ #########################################################
+ # 2. Validate the event content.
+ #########################################################
+ if not params.event.content:
+ return
+
+ if params.event.content.type != "text":
+ raise ValueError(f"Expected text message, got {params.event.content.type}")
+
+ if params.event.content.author != "user":
+ raise ValueError(f"Expected user message, got {params.event.content.author}")
+
+ #########################################################
+ # 3. Retrieve the task state.
+ #########################################################
+
+ task_state = await adk.state.get_by_task_and_agent(task_id=params.task.id, agent_id=params.agent.id)
+ state = StateModel.model_validate(task_state.state)
+ state.turn_number += 1
+
+ # Add the new user message to the message history
+ state.messages.append(UserMessage(content=params.event.content.content))
+
+ #########################################################
+ # 4. (👋) Create a tracing span.
+ #########################################################
+
+ # Create a tracing span. All of the Agentex ADK methods are "auto-traced", but by default show up as a flat list associated with a single trace id (which is usually just set to the task id by default).
+ # If you want to create a hierarchical trace, you can do so by creating spans in your business logic and passing the span id to the ADK methods. Traces will be grouped under parent spans for better readability.
+    # If you're not trying to create a hierarchical trace, but just trying to create a custom span to trace something, you can use this too to create a custom span that is associated with your trace by trace ID.
+
+ async with adk.tracing.span(
+ trace_id=params.task.id,
+ name=f"Turn {state.turn_number}",
+ input=state
+ ) as span:
+
+ #########################################################
+ # 5. Echo back the user's message so it shows up in the UI.
+ #########################################################
+
+ # (👋) Notice that we pass the parent_span_id to the ADK methods to create a hierarchical trace.
+ await adk.messages.create(
+ task_id=params.task.id,
+ trace_id=params.task.id,
+ content=params.event.content,
+ parent_span_id=span.id if span else None,
+ )
+
+ #########################################################
+ # 6. If the OpenAI API key is not set, send a message to the user to let them know.
+ #########################################################
+
+ # (👋) Notice that we pass the parent_span_id to the ADK methods to create a hierarchical trace.
+ if not os.environ.get("OPENAI_API_KEY"):
+ await adk.messages.create(
+ task_id=params.task.id,
+ trace_id=params.task.id,
+ content=TextContent(
+ author="agent",
+ content="Hey, sorry I'm unable to respond to your message because you're running this example without an OpenAI API key. Please set the OPENAI_API_KEY environment variable to run this example. Do this by either by adding a .env file to the project/ directory or by setting the environment variable in your terminal.",
+ ),
+ parent_span_id=span.id if span else None,
+ )
+
+ #########################################################
+ # 7. Call an LLM to respond to the user's message
+ #########################################################
+
+ # (👋) Notice that we pass the parent_span_id to the ADK methods to create a hierarchical trace.
+ task_message = await adk.providers.litellm.chat_completion_stream_auto_send(
+ task_id=params.task.id,
+ llm_config=LLMConfig(model="gpt-4o-mini", messages=state.messages, stream=True),
+ trace_id=params.task.id,
+ parent_span_id=span.id if span else None,
+ )
+
+ state.messages.append(AssistantMessage(content=task_message.content.content))
+
+ #########################################################
+ # 8. Store the messages in the task state for the next turn
+ #########################################################
+
+ # (👋) Notice that we pass the parent_span_id to the ADK methods to create a hierarchical trace.
+ await adk.state.update(
+ state_id=task_state.id,
+ task_id=params.task.id,
+ agent_id=params.agent.id,
+ state=state,
+ trace_id=params.task.id,
+ parent_span_id=span.id if span else None,
+ )
+
+ #########################################################
+ # 9. (👋) Set the span output to the state for the next turn
+ #########################################################
+
+ # (👋) You can store an arbitrary pydantic model or dictionary in the span output. The idea of a span is that it easily allows you to compare the input and output of a span to see what the wrapped function did.
+    # In this case, the state is comprehensive and expressive, so we just store the change in state that occurred.
+ span.output = state
+
+@acp.on_task_cancel
+async def handle_task_cancel(params: CancelTaskParams):
+ """Default task cancel handler"""
+ logger.info(f"Task canceled: {params.task}")
diff --git a/examples/tutorials/10_agentic/00_base/030_tracing/requirements.txt b/examples/tutorials/10_agentic/00_base/030_tracing/requirements.txt
new file mode 100644
index 000000000..d12ad3951
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/030_tracing/requirements.txt
@@ -0,0 +1,4 @@
+agentex-py>=0.0.4,<0.1.0
+
+# Scale GenAI Platform Python SDK
+scale-gp
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/00_base/040_other_sdks/.dockerignore b/examples/tutorials/10_agentic/00_base/040_other_sdks/.dockerignore
new file mode 100644
index 000000000..4521d8465
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/040_other_sdks/.dockerignore
@@ -0,0 +1,46 @@
+# Python
+__pycache__/
+*.py[cod]
+*$py.class
+*.so
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# Environments
+.env**
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# IDE
+.idea/
+.vscode/
+*.swp
+*.swo
+
+# Git
+.git
+.gitignore
+
+# Misc
+.DS_Store
+
+# CodeArtifact configuration
+.codeartifact-pip-conf
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/00_base/040_other_sdks/Dockerfile b/examples/tutorials/10_agentic/00_base/040_other_sdks/Dockerfile
new file mode 100644
index 000000000..ae6ab7ff3
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/040_other_sdks/Dockerfile
@@ -0,0 +1,45 @@
+# syntax=docker/dockerfile:1.3
+FROM python:3.12-slim
+COPY --from=ghcr.io/astral-sh/uv:0.6.4 /uv /uvx /bin/
+
+# Install system dependencies
+RUN apt-get update && apt-get install -y \
+ htop \
+ vim \
+ curl \
+ tar \
+ python3-dev \
+ postgresql-client \
+ build-essential \
+ libpq-dev \
+ gcc \
+ cmake \
+ netcat-openbsd \
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
+
+RUN uv pip install --system --upgrade pip setuptools wheel
+
+ENV UV_HTTP_TIMEOUT=1000
+
+# Copy just the requirements file to optimize caching
+COPY 040_other_sdks/requirements.txt /app/requirements.txt
+
+WORKDIR /app/
+
+# Install the required Python packages
+RUN --mount=type=secret,id=codeartifact-pip-conf,target=/etc/pip.conf \
+ export UV_INDEX_URL=$(grep -E "^index-url" /etc/pip.conf | cut -d'=' -f2- | xargs) && \
+ export UV_EXTRA_INDEX_URL=$(grep -E "^extra-index-url" /etc/pip.conf | cut -d'=' -f2- | xargs || echo "") && \
+ uv pip install --system -r requirements.txt
+
+# Copy the project code
+COPY 040_other_sdks/project /app/project
+
+WORKDIR /app/project
+
+# Set environment variables
+ENV PYTHONPATH=/app
+
+# Run the agent using uvicorn
+CMD ["uvicorn", "acp:acp", "--host", "0.0.0.0", "--port", "8000"]
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/00_base/040_other_sdks/README.md b/examples/tutorials/10_agentic/00_base/040_other_sdks/README.md
new file mode 100644
index 000000000..2596a8c7e
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/040_other_sdks/README.md
@@ -0,0 +1,127 @@
+# ab040-other-sdks - AgentEx Starter Template
+
+This is a generic starter template for building agents with the AgentEx framework. It provides a basic implementation of the Agent 2 Client Protocol (ACP) to help you get started quickly.
+
+## What You'll Learn
+
+- **Tasks**: A task is a grouping mechanism for related messages. Think of it as a conversation thread or a session.
+- **Messages**: Messages are communication objects within a task. They can contain text, data, or instructions.
+- **ACP Events**: The agent responds to four main events:
+ - `task_received`: When a new task is created
+ - `task_message_received`: When a message is sent within a task
+ - `task_approved`: When a task is approved
+ - `task_canceled`: When a task is canceled
+
+## Running the Agent
+
+1. Run the agent locally:
+```bash
+agentex agents run --manifest manifest.yaml
+```
+
+The agent will start on port 8000 and print messages whenever it receives any of the ACP events.
+
+## What's Inside
+
+This template:
+- Sets up a basic ACP server
+- Handles each of the required ACP events with simple print statements
+- Provides a foundation for building more complex agents
+
+## Next Steps
+
+For more advanced agent development, check out the AgentEx tutorials:
+
+- **Tutorials 00-08**: Learn about building synchronous agents with ACP
+- **Tutorials 09-10**: Learn how to use Temporal to power asynchronous agents
+ - Tutorial 09: Basic Temporal workflow setup
+ - Tutorial 10: Advanced Temporal patterns and best practices
+
+These tutorials will help you understand:
+- How to handle long-running tasks
+- Implementing state machines
+- Managing complex workflows
+- Best practices for async agent development
+
+## The Manifest File
+
+The `manifest.yaml` file is your agent's configuration file. It defines:
+- How your agent should be built and packaged
+- What files are included in your agent's Docker image
+- Your agent's name and description
+- Local development settings (like the port your agent runs on)
+
+This file is essential for both local development and deployment of your agent.
+
+## Project Structure
+
+```
+040_other_sdks/
+├── project/ # Your agent's code
+│ ├── __init__.py
+│ └── acp.py # ACP server and event handlers
+├── Dockerfile # Container definition
+├── manifest.yaml # Deployment config
+└── requirements.txt # Dependencies
+```
+
+## Development
+
+1. **Customize Event Handlers**
+ - Modify the handlers in `acp.py` to implement your agent's logic
+ - Add your own tools and capabilities
+ - Implement custom state management
+
+2. **Add Dependencies**
+ - Add required packages to `requirements.txt`
+ - Update the manifest with any needed credentials
+
+## Local Development
+
+1. **Install AgentEx**
+```bash
+cd agentex-py
+uv venv
+source .venv/bin/activate
+uv sync
+```
+
+2. **Deploy your agent**
+```bash
+# From this directory
+export ENVIRONMENT=development && agentex agents create --manifest manifest.yaml
+```
+
+3. **Interact with your agent**
+
+Option 0: CLI (deprecated - to be replaced once a new CLI is implemented - please use the web UI for now!)
+```bash
+# Submit a task via CLI
+agentex tasks submit --agent {{ agent_name }} --task "Your task here"
+```
+
+Option 1: Web UI
+```bash
+# Start the local web interface
+cd agentex-web
+make dev
+
+# Then open http://localhost:3000 in your browser to chat with your agent
+```
+
+## Development Tips
+
+1. **Local Testing**
+- Set environment variables in project/.env for any required credentials. OR you can set these in the manifest.yaml
+
+### To build the agent Docker image locally (normally not necessary):
+
+1. First, set up CodeArtifact authentication:
+```bash
+../../setup-build-codeartifact.sh
+```
+
+2. Build the agent image:
+```bash
+agentex agents build --manifest manifest.yaml --secret 'id=codeartifact-pip-conf,src=.codeartifact-pip-conf'
+```
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/00_base/040_other_sdks/manifest.yaml b/examples/tutorials/10_agentic/00_base/040_other_sdks/manifest.yaml
new file mode 100644
index 000000000..8695ab7d4
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/040_other_sdks/manifest.yaml
@@ -0,0 +1,119 @@
+# Agent Manifest Configuration
+# ---------------------------
+# This file defines how your agent should be built and deployed.
+
+# Build Configuration
+# ------------------
+# The build config defines what gets packaged into your agent's Docker image.
+# This same configuration is used whether building locally or remotely.
+#
+# When building:
+# 1. All files from include_paths are collected into a build context
+# 2. The context is filtered by dockerignore rules
+# 3. The Dockerfile uses this context to build your agent's image
+# 4. The image is pushed to a registry and used to run your agent
+build:
+ context:
+ # Root directory for the build context
+ root: ../ # Keep this as the default root
+
+ # Paths to include in the Docker build context
+ # Must include:
+ # - agentex-py: The core framework (needed for agent execution)
+ # - Your agent's directory (your custom agent code)
+ # These paths are collected and sent to the Docker daemon for building
+ include_paths:
+ - 040_other_sdks
+
+ # Path to your agent's Dockerfile
+ # This defines how your agent's image is built from the context
+ # Relative to the root directory
+ dockerfile: 040_other_sdks/Dockerfile
+
+ # Path to your agent's .dockerignore
+ # Filters unnecessary files from the build context
+ # Helps keep build context small and builds fast
+ dockerignore: 040_other_sdks/.dockerignore
+
+
+# Local Development Configuration
+# -----------------------------
+# Only used when running the agent locally
+local_development:
+ agent:
+ port: 8000 # Port where your local ACP server is running
+ host_address: host.docker.internal # Host address for Docker networking (host.docker.internal for Docker, localhost for direct)
+
+ # File paths for local development (relative to this manifest.yaml)
+ paths:
+ # Path to ACP server file
+ # Examples:
+ # project/acp.py (standard)
+ # src/server.py (custom structure)
+ # ../shared/acp.py (shared across projects)
+ # /absolute/path/acp.py (absolute path)
+ acp: project/acp.py
+
+
+# Agent Configuration
+# -----------------
+agent:
+ acp_type: agentic
+
+ # Unique name for your agent
+ # Used for task routing and monitoring
+ name: ab040-other-sdks
+
+ # Description of what your agent does
+ # Helps with documentation and discovery
+  description: An AgentEx agent that uses other SDKs to show the flexibility that agents are just code
+
+ # Temporal workflow configuration
+ # Set enabled: true to use Temporal workflows for long-running tasks
+ temporal:
+ enabled: false
+
+ # Optional: Credentials mapping
+ # Maps Kubernetes secrets to environment variables
+ # Common credentials include:
+ # credentials:
+ # - env_var_name: OPENAI_API_KEY
+ # secret_name: openai-api-key
+ # secret_key: api-key
+
+ # Optional: Set Environment variables for running your agent locally as well
+ # as for deployment later on
+ # env:
+ # - name: OPENAI_BASE_URL
+ # value: "https://api.openai.com/v1"
+ # - name: ACCOUNT_ID
+ # value: "your_account_id_here"
+
+
+# Deployment Configuration
+# -----------------------
+# Configuration for deploying your agent to Kubernetes clusters
+deployment:
+ # Container image configuration
+ image:
+ repository: "" # Update with your container registry
+ tag: "latest" # Default tag, should be versioned in production
+
+ # Global deployment settings that apply to all clusters
+ # These can be overridden in cluster-specific files (deploy/*.yaml)
+ global:
+ agent:
+ name: "ab040-other-sdks"
+      description: "An AgentEx agent that uses other SDKs to show the flexibility that agents are just code"
+
+ # Default replica count
+ replicaCount: 1
+
+ # Default resource requirements
+ resources:
+ requests:
+ cpu: "500m"
+ memory: "1Gi"
+ limits:
+ cpu: "1000m"
+ memory: "2Gi"
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/00_base/040_other_sdks/project/__init__.py b/examples/tutorials/10_agentic/00_base/040_other_sdks/project/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/examples/tutorials/10_agentic/00_base/040_other_sdks/project/acp.py b/examples/tutorials/10_agentic/00_base/040_other_sdks/project/acp.py
new file mode 100644
index 000000000..9b149630c
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/040_other_sdks/project/acp.py
@@ -0,0 +1,376 @@
+import os
+from typing import Dict, List, Optional
+from contextlib import AsyncExitStack, asynccontextmanager
+import json
+
+from agentex.lib import adk
+from agentex.lib.core.services.adk.streaming import StreamingTaskMessageContext
+from agentex.lib.sdk.fastacp.fastacp import FastACP
+from agentex.lib.types.acp import CancelTaskParams, CreateTaskParams, SendEventParams
+from agentex.lib.types.fastacp import AgenticACPConfig
+from agentex.lib.types.task_message_updates import (
+ StreamTaskMessageDelta,
+ StreamTaskMessageFull,
+ TextDelta,
+)
+from agentex.lib.utils.logging import make_logger
+from agentex.lib.utils.model_utils import BaseModel
+from agentex.types.text_content import TextContent
+from agentex.types.task_message_content import ToolRequestContent, ToolResponseContent
+
+from agents import Agent, Runner
+from agents.mcp import MCPServerStdio
+from mcp import StdioServerParameters
+from openai.types.responses import (
+ ResponseCompletedEvent,
+ ResponseFunctionToolCall,
+ ResponseOutputItemDoneEvent,
+ ResponseTextDeltaEvent,
+)
+from pydantic import BaseModel
+
+logger = make_logger(__name__)
+
+
+# Create an ACP server
+
+# !!! Warning: Because "Agentic" ACPs are designed to be fully asynchronous, race conditions can occur if parallel events are sent. It is highly recommended to use the "temporal" type in the AgenticACPConfig instead to handle complex use cases. The "base" ACP is only designed to be used for simple use cases and for learning purposes.
+acp = FastACP.create(
+ acp_type="agentic",
+ config=AgenticACPConfig(type="base"),
+)
+
+class StateModel(BaseModel):
+ input_list: List[dict]
+ turn_number: int
+
+
+MCP_SERVERS = [
+ StdioServerParameters(
+ command="npx",
+ args=["-y", "@modelcontextprotocol/server-sequential-thinking"],
+ ),
+ StdioServerParameters(
+ command="uvx",
+ args=["openai-websearch-mcp"],
+ env={
+ "OPENAI_API_KEY": os.environ.get("OPENAI_API_KEY", "")
+ }
+ ),
+]
+
+
+@acp.on_task_create
+async def handle_task_create(params: CreateTaskParams):
+ # Upon task creation, we initialize the task state with a system message.
+ # This will be fetched by the `on_task_event_send` handler when each event is sent.
+ state = StateModel(
+ input_list=[],
+ turn_number=0,
+ )
+ await adk.state.create(task_id=params.task.id, agent_id=params.agent.id, state=state)
+
+@acp.on_task_event_send
+async def handle_event_send(params: SendEventParams):
+ # !!! Warning: Because "Agentic" ACPs are designed to be fully asynchronous, race conditions can occur if parallel events are sent. It is highly recommended to use the "temporal" type in the AgenticACPConfig instead to handle complex use cases. The "base" ACP is only designed to be used for simple use cases and for learning purposes.
+
+ if not params.event.content:
+ return
+
+ if params.event.content.type != "text":
+ raise ValueError(f"Expected text message, got {params.event.content.type}")
+
+ if params.event.content.author != "user":
+ raise ValueError(f"Expected user message, got {params.event.content.author}")
+
+
+ # Retrieve the task state. Each event is handled as a new turn, so we need to get the state for the current turn.
+ task_state = await adk.state.get_by_task_and_agent(task_id=params.task.id, agent_id=params.agent.id)
+ state = StateModel.model_validate(task_state.state)
+ state.turn_number += 1
+
+ # Add the new user message to the message history
+ state.input_list.append({"role": "user", "content": params.event.content.content})
+
+ async with adk.tracing.span(
+ trace_id=params.task.id,
+ name=f"Turn {state.turn_number}",
+ input=state
+ ) as span:
+ # Echo back the user's message so it shows up in the UI. This is not done by default so the agent developer has full control over what is shown to the user.
+ await adk.messages.create(
+ task_id=params.task.id,
+ trace_id=params.task.id,
+ content=params.event.content,
+ parent_span_id=span.id if span else None,
+ )
+
+ if not os.environ.get("OPENAI_API_KEY"):
+ await adk.messages.create(
+ task_id=params.task.id,
+ trace_id=params.task.id,
+ content=TextContent(
+ author="agent",
+ content="Hey, sorry I'm unable to respond to your message because you're running this example without an OpenAI API key. Please set the OPENAI_API_KEY environment variable to run this example. Do this by either by adding a .env file to the project/ directory or by setting the environment variable in your terminal.",
+ ),
+ parent_span_id=span.id if span else None,
+ )
+
+ #########################################################
+ # (👋) Call an LLM to respond to the user's message using custom streaming
+ #########################################################
+
+ # This demonstrates advanced streaming patterns using adk.streaming.
+ # We'll show two different streaming approaches:
+ # 1. Simple streaming with context managers for complete messages (tool calls)
+ # 2. Delta-based streaming for incremental text responses
+ run_result = await run_openai_agent_with_custom_streaming(
+ task_id=params.task.id,
+ trace_id=params.task.id,
+ input_list=state.input_list,
+ mcp_server_params=MCP_SERVERS,
+ agent_name="Tool-Enabled Assistant",
+ agent_instructions="""You are a helpful assistant that can answer questions using various tools.
+ You have access to sequential thinking and web search capabilities through MCP servers.
+ Use these tools when appropriate to provide accurate and well-reasoned responses.""",
+ parent_span_id=span.id if span else None,
+ )
+
+ state.input_list = run_result.to_input_list()
+
+ # Store the messages in the task state for the next turn
+ await adk.state.update(
+ state_id=task_state.id,
+ task_id=params.task.id,
+ agent_id=params.agent.id,
+ state=state,
+ trace_id=params.task.id,
+ parent_span_id=span.id if span else None,
+ )
+
+ # Set the span output to the state for the next turn
+ span.output = state
+
+@acp.on_task_cancel
+async def handle_task_cancel(params: CancelTaskParams):
+ """Default task cancel handler"""
+ logger.info(f"Task canceled: {params.task}")
+
+
+########################################################
+# Helper functions that integrate Agentex primitives with other SDKs like OpenAI Agents
+########################################################
+
+
+@asynccontextmanager
+async def mcp_server_context(mcp_server_params: list[StdioServerParameters]):
+ """Context manager for MCP servers."""
+ servers = []
+ for params in mcp_server_params:
+ server = MCPServerStdio(
+ name=f"Server: {params.command}",
+ params=params.model_dump(),
+ cache_tools_list=True,
+ client_session_timeout_seconds=60,
+ )
+ servers.append(server)
+
+ async with AsyncExitStack() as stack:
+ for server in servers:
+ await stack.enter_async_context(server)
+ yield servers
+
+
+def redact_mcp_server_params(
+ mcp_server_params: list[StdioServerParameters],
+) -> list[StdioServerParameters]:
+ """Redact MCP server params."""
+ return [
+ StdioServerParameters(
+ **{k: v for k, v in server_param.model_dump().items() if k != "env"},
+ env={k: "********" for k in server_param.env} if server_param.env else None,
+ )
+ for server_param in mcp_server_params
+ ]
+
+
+async def run_openai_agent_with_custom_streaming(
+ task_id: str,
+ trace_id: str,
+ input_list: list[Dict],
+ mcp_server_params: list[StdioServerParameters],
+ agent_name: str,
+ agent_instructions: str,
+ parent_span_id: Optional[str] = None,
+):
+ """
+ Run an OpenAI agent with custom streaming using adk.streaming.
+
+ This demonstrates advanced streaming patterns using adk.streaming.
+ We'll show two different streaming approaches:
+ 1. Simple streaming with context managers for complete messages (tool calls)
+ 2. Delta-based streaming for incremental text responses
+ """
+
+ tool_call_map: Dict[str, ResponseFunctionToolCall] = {}
+
+ redacted_mcp_server_params = redact_mcp_server_params(mcp_server_params)
+
+ result = None
+ async with adk.tracing.span(
+ trace_id=trace_id,
+ name="run_agent_with_custom_streaming",
+ input={
+ "input_list": input_list,
+ "mcp_server_params": redacted_mcp_server_params,
+ "agent_name": agent_name,
+ "agent_instructions": agent_instructions,
+ },
+ parent_id=parent_span_id,
+ ) as span:
+ async with mcp_server_context(mcp_server_params) as servers:
+ agent = Agent(
+ name=agent_name,
+ instructions=agent_instructions,
+ mcp_servers=servers,
+ )
+
+ # Run with streaming enabled
+ result = Runner.run_streamed(starting_agent=agent, input=input_list)
+
+ #########################################################
+ # (👋) For complete messages like tool calls we will use a with block to create a streaming context, but for text deltas we will use a streaming context that is created and closed manually. To make sure we close all streaming contexts we will track the item_id and close them all at the end.
+ #########################################################
+
+ item_id_to_streaming_context: Dict[str, StreamingTaskMessageContext] = {}
+ unclosed_item_ids: set[str] = set()
+
+ try:
+ # Process streaming events with TaskMessage creation
+ async for event in result.stream_events():
+
+ if event.type == "run_item_stream_event":
+ if event.item.type == "tool_call_item":
+ tool_call_item = event.item.raw_item
+ tool_call_map[tool_call_item.call_id] = tool_call_item
+
+ logger.info(f"Tool call item: {tool_call_item}")
+
+ tool_request_content = ToolRequestContent(
+ author="agent",
+ tool_call_id=tool_call_item.call_id,
+ name=tool_call_item.name,
+ arguments=json.loads(tool_call_item.arguments),
+ )
+
+ # (👋) Create a streaming context for the tool call
+ # Since a tool call is a complete message, we can use a with block to create a streaming context. This will take care of creating a TaskMessage, sending a START event, and sending a DONE event when the context is closed. Of course you will also want to stream the content of the tool call so clients that are subscribed to streaming updates to the task will see the tool call.
+ async with adk.streaming.streaming_task_message_context(
+ task_id=task_id,
+ initial_content=tool_request_content,
+ ) as streaming_context:
+                                # The message has already been persisted, but we still need to send an update
+ await streaming_context.stream_update(
+ update=StreamTaskMessageFull(
+ parent_task_message=streaming_context.task_message,
+ content=tool_request_content,
+ content_type=tool_request_content.type,
+ ),
+ )
+
+ elif event.item.type == "tool_call_output_item":
+ tool_output_item = event.item.raw_item
+
+ tool_response_content = ToolResponseContent(
+ author="agent",
+ tool_call_id=tool_output_item["call_id"],
+ name=tool_call_map[tool_output_item["call_id"]].name,
+ content=tool_output_item["output"],
+ )
+
+ # (👋) Create a streaming context for the tool call output
+ # Since a tool call output is a complete message, we can use a with block to create a streaming context. This will take care of creating a TaskMessage, sending a START event, and sending a DONE event when the context is closed. Of course you will also want to stream the content of the tool call output so clients that are subscribed to streaming updates to the task will see the tool call output.
+ async with adk.streaming.streaming_task_message_context(
+ task_id=task_id,
+ initial_content=tool_response_content,
+ ) as streaming_context:
+ # The message has already been persisted, but we still need to send an update
+ await streaming_context.stream_update(
+ update=StreamTaskMessageFull(
+ parent_task_message=streaming_context.task_message,
+ content=tool_response_content,
+ content_type=tool_response_content.type,
+ ),
+ )
+
+ elif event.type == "raw_response_event":
+ if isinstance(event.data, ResponseTextDeltaEvent):
+ # Handle text delta
+ item_id = event.data.item_id
+
+ # (👋) Create a streaming context for the text delta
+ # Since a text delta is a partial message, we will create a streaming context manually without a with block because we need to persist the context across the for loop.
+ if item_id not in item_id_to_streaming_context:
+ streaming_context = adk.streaming.streaming_task_message_context(
+ task_id=task_id,
+ initial_content=TextContent(
+ author="agent",
+ content="",
+ ),
+ )
+ # (👋) Open the streaming context manually
+ # This will create a TaskMessage and send a START event for you.
+ item_id_to_streaming_context[item_id] = await streaming_context.open()
+
+ # (👋) Add the item_id to the set of unclosed item_ids
+ # This will allow us to close any lingering streaming context when the agent is done.
+ unclosed_item_ids.add(item_id)
+ else:
+ streaming_context = item_id_to_streaming_context[item_id]
+
+ # (👋) Stream the delta through the streaming service
+ # This will send a DELTA event. The context manager will accumulate the content for you into a final message when you close the context.
+ await streaming_context.stream_update(
+ update=StreamTaskMessageDelta(
+ parent_task_message=streaming_context.task_message,
+ delta=TextDelta(text_delta=event.data.delta),
+ ),
+ )
+
+ elif isinstance(event.data, ResponseOutputItemDoneEvent):
+ # Handle item completion
+ item_id = event.data.item.id
+
+ # (👋) Close the streaming context
+ # This will send a DONE event and update the persisted message.
+ if item_id in item_id_to_streaming_context:
+ streaming_context = item_id_to_streaming_context[item_id]
+ await streaming_context.close()
+ unclosed_item_ids.remove(item_id)
+
+ elif isinstance(event.data, ResponseCompletedEvent):
+                            # (👋) Close all remaining streaming contexts
+                            # This will send a DONE event and update the persisted messages for all remaining streaming contexts. Normally this won't be needed if all messages are closed by the time the agent is done. NOTE(review): the loop below removes ids from unclosed_item_ids while iterating the set, which raises RuntimeError in Python — iterate over list(unclosed_item_ids) and clear the set afterwards.
+ for item_id in unclosed_item_ids:
+ streaming_context = item_id_to_streaming_context[item_id]
+ await streaming_context.close()
+ unclosed_item_ids.remove(item_id)
+
+ finally:
+            # (👋) Close all remaining streaming contexts
+            # This will send a DONE event and update the persisted messages for all remaining streaming contexts. Normally this won't be needed, but we do it in case any errors occur. NOTE(review): removing from unclosed_item_ids while iterating it raises RuntimeError — iterate over a copy (list(unclosed_item_ids)) and clear the set after the loop.
+ for item_id in unclosed_item_ids:
+ streaming_context = item_id_to_streaming_context[item_id]
+ await streaming_context.close()
+ unclosed_item_ids.remove(item_id)
+ if span:
+ span.output = {
+ "new_items": [
+ item.raw_item.model_dump()
+ if isinstance(item.raw_item, BaseModel)
+ else item.raw_item
+ for item in result.new_items
+ ],
+ "final_output": result.final_output,
+ }
+ return result
diff --git a/examples/tutorials/10_agentic/00_base/040_other_sdks/requirements.txt b/examples/tutorials/10_agentic/00_base/040_other_sdks/requirements.txt
new file mode 100644
index 000000000..d12ad3951
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/040_other_sdks/requirements.txt
@@ -0,0 +1,4 @@
+agentex-py>=0.0.4,<0.1.0
+
+# Scale GenAI Platform Python SDK
+scale-gp
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/00_base/050_hello_oldowan/.dockerignore b/examples/tutorials/10_agentic/00_base/050_hello_oldowan/.dockerignore
new file mode 100644
index 000000000..c3620f1bc
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/050_hello_oldowan/.dockerignore
@@ -0,0 +1,46 @@
+# Python
+__pycache__/
+*.py[cod]
+*$py.class
+*.so
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# Environments
+.env**
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# IDE
+.idea/
+.vscode/
+*.swp
+*.swo
+
+# Git
+.git
+.gitignore
+
+# Misc
+.DS_Store
+
+# CodeArtifact configuration
+.codeartifact-pip-conf
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/00_base/050_hello_oldowan/Dockerfile b/examples/tutorials/10_agentic/00_base/050_hello_oldowan/Dockerfile
new file mode 100644
index 000000000..aca315ae6
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/050_hello_oldowan/Dockerfile
@@ -0,0 +1,45 @@
+# syntax=docker/dockerfile:1.3
+FROM python:3.12-slim
+COPY --from=ghcr.io/astral-sh/uv:0.6.4 /uv /uvx /bin/
+
+# Install system dependencies
+RUN apt-get update && apt-get install -y \
+ htop \
+ vim \
+ curl \
+ tar \
+ python3-dev \
+ postgresql-client \
+ build-essential \
+ libpq-dev \
+ gcc \
+ cmake \
+ netcat-openbsd \
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
+
+RUN uv pip install --system --upgrade pip setuptools wheel
+
+ENV UV_HTTP_TIMEOUT=1000
+
+# Copy just the requirements file to optimize caching
+COPY 050_hello_oldowan/requirements.txt /app/requirements.txt
+
+WORKDIR /app/
+
+# Install the required Python packages
+RUN --mount=type=secret,id=codeartifact-pip-conf,target=/etc/pip.conf \
+ export UV_INDEX_URL=$(grep -E "^index-url" /etc/pip.conf | cut -d'=' -f2- | xargs) && \
+ export UV_EXTRA_INDEX_URL=$(grep -E "^extra-index-url" /etc/pip.conf | cut -d'=' -f2- | xargs || echo "") && \
+ uv pip install --system -r requirements.txt
+
+# Copy the project code
+COPY 050_hello_oldowan/project /app/project
+
+WORKDIR /app/project
+
+# Set environment variables
+ENV PYTHONPATH=/app
+
+# Run the agent using uvicorn
+CMD ["uvicorn", "acp:acp", "--host", "0.0.0.0", "--port", "8000"]
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/00_base/050_hello_oldowan/README.md b/examples/tutorials/10_agentic/00_base/050_hello_oldowan/README.md
new file mode 100644
index 000000000..d8c8c12ce
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/050_hello_oldowan/README.md
@@ -0,0 +1,17 @@
+# Hello Oldowan Agent
+
+This is a simple example agent that demonstrates the basics of the Agent 2 Client Protocol (ACP) and the AgentEx framework with an integration to oldowan.
+
+## For Development
+Navigate to `tutorials/10_agentic/00_base/050_hello_oldowan`
+
+```bash
+# Generate CodeArtifact configuration for building (run from repo root)
+./setup-build-codeartifact.sh
+
+# Set up local development environment
+uv venv --python 3.12
+source .venv/bin/activate
+
+uv pip install -r requirements.txt --prerelease=allow
+```
diff --git a/examples/tutorials/10_agentic/00_base/050_hello_oldowan/manifest.yaml b/examples/tutorials/10_agentic/00_base/050_hello_oldowan/manifest.yaml
new file mode 100644
index 000000000..addbb6689
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/050_hello_oldowan/manifest.yaml
@@ -0,0 +1,116 @@
+# Agent Manifest Configuration
+# ---------------------------
+# This file defines how your agent should be built and deployed.
+
+# Build Configuration
+# ------------------
+# The build config defines what gets packaged into your agent's Docker image.
+# This same configuration is used whether building locally or remotely.
+#
+# When building:
+# 1. All files from include_paths are collected into a build context
+# 2. The context is filtered by dockerignore rules
+# 3. The Dockerfile uses this context to build your agent's image
+# 4. The image is pushed to a registry and used to run your agent
+build:
+ context:
+ # Root directory for the build context
+ root: ../ # Keep this as the default root
+
+ # Paths to include in the Docker build context
+ # Must include:
+ # - agentex-py: The core framework (needed for agent execution)
+ # - Your agent's directory (your custom agent code)
+ # These paths are collected and sent to the Docker daemon for building
+ include_paths:
+ - 050_hello_oldowan
+
+ # Path to your agent's Dockerfile
+ # This defines how your agent's image is built from the context
+ # Relative to the root directory
+ dockerfile: 050_hello_oldowan/Dockerfile
+
+ # Path to your agent's .dockerignore
+ # Filters unnecessary files from the build context
+ # Helps keep build context small and builds fast
+ dockerignore: 050_hello_oldowan/.dockerignore
+
+# Local Development Configuration
+# -----------------------------
+# Only used when running the agent locally
+local_development:
+ agent:
+ port: 8000 # Port where your local ACP server is running
+ host_address: host.docker.internal # Host address for Docker networking (host.docker.internal for Docker, localhost for direct)
+
+ # File paths for local development (relative to this manifest.yaml)
+ paths:
+ # Path to ACP server file
+ # Examples:
+ # project/acp.py (standard)
+ # src/server.py (custom structure)
+ # ../shared/acp.py (shared across projects)
+ # /absolute/path/acp.py (absolute path)
+ acp: project/acp.py
+
+# Agent Configuration
+# -----------------
+agent:
+ acp_type: agentic
+
+ # Unique name for your agent
+ # Used for task routing and monitoring
+ name: ab050-hello-oldowan
+
+ # Description of what your agent does
+ # Helps with documentation and discovery
+  description: An AgentEx agent that uses Oldowan to show the flexibility that agents are just code
+
+ # Temporal workflow configuration
+ # Set enabled: true to use Temporal workflows for long-running tasks
+ temporal:
+ enabled: false
+
+ # Optional: Credentials mapping
+ # Maps Kubernetes secrets to environment variables
+ # Common credentials include:
+ # credentials:
+ # - env_var_name: OPENAI_API_KEY
+ # secret_name: openai-api-key
+ # secret_key: api-key
+
+ # Optional: Set Environment variables for running your agent locally as well
+ # as for deployment later on
+ # env:
+ # - name: OPENAI_BASE_URL
+ # value: "https://api.openai.com/v1"
+ # - name: ACCOUNT_ID
+ # value: "your_account_id_here"
+
+# Deployment Configuration
+# -----------------------
+# Configuration for deploying your agent to Kubernetes clusters
+deployment:
+ # Container image configuration
+ image:
+ repository: "" # Update with your container registry
+ tag: "latest" # Default tag, should be versioned in production
+
+ # Global deployment settings that apply to all clusters
+ # These can be overridden in cluster-specific files (deploy/*.yaml)
+ global:
+ agent:
+ name: "ab050-hello-oldowan"
+      description: "An AgentEx agent that uses Oldowan to show the flexibility that agents are just code"
+
+ # Default replica count
+ replicaCount: 1
+
+ # Default resource requirements
+ resources:
+ requests:
+ cpu: "500m"
+ memory: "1Gi"
+ limits:
+ cpu: "1000m"
+ memory: "2Gi"
diff --git a/examples/tutorials/10_agentic/00_base/050_hello_oldowan/project/__init__.py b/examples/tutorials/10_agentic/00_base/050_hello_oldowan/project/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/examples/tutorials/10_agentic/00_base/050_hello_oldowan/project/acp.py b/examples/tutorials/10_agentic/00_base/050_hello_oldowan/project/acp.py
new file mode 100644
index 000000000..ad9acc06c
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/050_hello_oldowan/project/acp.py
@@ -0,0 +1,435 @@
+import os
+import json
+from typing import Callable, List, Union, Dict
+from functools import partial
+import logging
+
+logger = logging.getLogger(__name__)
+
+from agentex.lib import adk
+from agentex.lib.sdk.fastacp.fastacp import FastACP
+from agentex.lib.types.acp import SendEventParams, CreateTaskParams, CancelTaskParams
+from agentex.lib.types.fastacp import AgenticACPConfig
+from agentex.lib.types.task_message_updates import StreamTaskMessageDelta, StreamTaskMessageFull, TaskMessageUpdate
+from agentex.lib.types.task_message_updates import DeltaType, TextDelta, ToolResponseDelta
+from agentex.lib.core.services.adk.streaming import StreamingTaskMessageContext
+from agentex.lib.utils.model_utils import BaseModel
+from agentex.types.span import Span
+from agentex.lib.types.llm_messages import Message, UserMessage
+from agentex.types.text_content import TextContent
+from agentex.types.task_message_content import TaskMessageContent, ToolRequestContent, ToolResponseContent
+
+from oldowan.tools.internal import google_search
+from oldowan.completions import ToolMessage, ChatCompletionMessage, simple_agent_acompletion, ChoiceDelta
+
+assert os.environ.get("SGP_API_KEY") is not None, "SGP_API_KEY is not set"
+assert os.environ.get("SGP_ACCOUNT_ID") is not None, "SGP_ACCOUNT_ID is not set"
+
+def think(thinking_str: str):
+ """
+ Use the tool to think about something. It will not obtain new information or change the database, but just append the thought to the log. Use it when complex reasoning or some cache memory is needed.
+ Args:
+ thinking_str: A thought to think about.
+ """
+ return
+
+TOOL_DICT = {
+ "google_search": google_search,
+ "think": think,
+}
+
+# Create an ACP server
+acp = FastACP.create(
+ acp_type="agentic",
+ config=AgenticACPConfig(type="base"),
+)
+
+TOOL_RESPONSE_ID_SUFFIX = "_response"
+
+class SimpleAgentCompletionConfig(BaseModel):
+ model: str
+ tools: List[str]
+ max_tokens: int
+ stream: bool
+
+class StateModel(BaseModel):
+ turn_number: int # The number of turns the agent has taken
+ messages: List[Message] # The messages the agent has seen
+ simple_agent_completion_config: SimpleAgentCompletionConfig # The function to call to get an agent response
+
+def convert_choice_delta_to_stream_task_message_deltas(choice_delta: ChoiceDelta, parent_task_message: TaskMessage) -> List[StreamTaskMessageDelta]:
+ """
+ This function converts a ChoiceDelta to a list of StreamTaskMessageDelta objects.
+ Args:
+ choice_delta: The ChoiceDelta to convert.
+ parent_task_message: The parent task message.
+ Returns:
+ A list of StreamTaskMessageDelta objects.
+ """
+ # these are tool requests
+ deltas = []
+ if choice_delta.tool_calls is not None and choice_delta.tool_calls[0].function.name is not None:
+
+ for tool_call in choice_delta.tool_calls:
+ # print(tool_call)
+ # don't stream tool calls yet.
+ # deltas.append(StreamTaskMessageDelta(
+ # index=idx,
+ # content_type=TaskMessageContentType.TOOL_REQUEST,
+ # delta='', # tool_call.function.arguments
+ # ))
+ pass
+ # These are tool responses
+ elif choice_delta.role == "tool":
+ deltas.append(StreamTaskMessageDelta(
+ parent_task_message=parent_task_message,
+ delta=ToolResponseDelta(
+ type=DeltaType.TOOL_RESPONSE,
+ tool_call_id=choice_delta.tool_call_id,
+ name=choice_delta.name,
+ content_delta=choice_delta.content,
+ ),
+ ))
+
+ # These are assistant messages
+ elif choice_delta.content is not None:
+ deltas.append(StreamTaskMessageDelta(
+ parent_task_message=parent_task_message,
+ delta=TextDelta(
+ type=DeltaType.TEXT,
+ text_delta=choice_delta.content,
+ ),
+ ))
+
+ return deltas
+
+def convert_choice_delta_to_message_content(choice_delta: ChoiceDelta) -> TaskMessageContent:
+ """
+ This function converts a ChoiceDelta to a TaskMessageContent object.
+ Args:
+ choice_delta: The ChoiceDelta to convert.
+ Returns:
+ A TaskMessageContent object.
+ """
+ # This converts a ChoiceDelta to a TaskMessage which will instantiate "the box" to send to client
+ if choice_delta.tool_calls is not None:
+        # since we are streaming we can assume we only need to create a message for the first tool call
+ return ToolRequestContent(
+ author="agent",
+ name=choice_delta.tool_calls[0].function.name,
+ tool_call_id=choice_delta.tool_calls[0].id,
+ arguments={}, # have to start this empty since we are streaming
+ )
+ elif choice_delta.role == "tool":
+ return ToolResponseContent(
+ author="agent",
+ name=choice_delta.name,
+ tool_call_id=choice_delta.tool_call_id,
+ content='', # starting empty because we add to it
+ )
+ elif choice_delta.role == "assistant":
+ return TextContent(
+ author="agent",
+ content='', # starting empty because we add to it
+ )
+ raise ValueError(f"Unknown role: {choice_delta.role}. Failed to convert to TaskMessage")
+
+async def convert_oldowan_message_to_stream_task_message_full(
+ id_to_streaming_context: Dict[str, StreamingTaskMessageContext],
+ oldowan_message: Union[ChatCompletionMessage, ToolMessage],
+ ) -> List[StreamTaskMessageFull]:
+ """
+ This function converts an Oldowan message to a list of StreamTaskMessageFull objects.
+ Args:
+        id_to_streaming_context: A dictionary mapping message, tool-call, or tool-response
+            ids to their open streaming contexts.
+ oldowan_message: The Oldowan message to convert.
+ Returns:
+ A list of StreamTaskMessageFull objects.
+ """
+
+ if isinstance(oldowan_message, ChatCompletionMessage):
+ # First create all tool calls
+ if oldowan_message.tool_calls is not None:
+ for tool_call in oldowan_message.tool_calls:
+ task_message_full = StreamTaskMessageFull(
+ parent_task_message=id_to_streaming_context[tool_call.id].task_message,
+ content=ToolRequestContent(
+ author="agent",
+ name=tool_call.function.name,
+ tool_call_id=tool_call.id,
+ arguments=json.loads(tool_call.function.arguments),
+ ),
+ )
+ await id_to_streaming_context[tool_call.id].stream_update(
+ update=task_message_full,
+ )
+
+
+ # Create the assistant messages
+ if oldowan_message.content is not None:
+ task_message_full = StreamTaskMessageFull(
+ parent_task_message=id_to_streaming_context[oldowan_message.id].task_message,
+ content=TextContent(
+ author="agent",
+ content=oldowan_message.content,
+ ),
+ )
+ await id_to_streaming_context[oldowan_message.id].stream_update(
+ update=task_message_full,
+ )
+
+ # Finally create the tool responses
+ elif isinstance(oldowan_message, ToolMessage):
+ task_message_full = StreamTaskMessageFull(
+ parent_task_message=id_to_streaming_context[oldowan_message.tool_call_id + TOOL_RESPONSE_ID_SUFFIX].task_message,
+ content=ToolResponseContent(
+ author="agent",
+ name=oldowan_message.name,
+ content=oldowan_message.content,
+ tool_call_id=oldowan_message.tool_call_id,
+ ),
+ )
+ await id_to_streaming_context[oldowan_message.tool_call_id + TOOL_RESPONSE_ID_SUFFIX].stream_update(
+ update=task_message_full,
+ )
+
+def get_oldowan_message_ids(oldowan_message: Union[ChatCompletionMessage, ToolMessage]) -> List[str]:
+ """
+ This function gets the ids of the oldowan message.
+ Args:
+ oldowan_message: The Oldowan message to get the ids of.
+ Returns:
+ A list of ids.
+ """
+ message_ids = []
+ if isinstance(oldowan_message, ChatCompletionMessage):
+ # check that there is content
+ if oldowan_message.content is not None:
+ message_ids.append(oldowan_message.id)
+
+ # check if there are tool calls
+ if oldowan_message.tool_calls is not None:
+ for tool_call in oldowan_message.tool_calls:
+ message_ids.append(tool_call.id)
+
+ elif isinstance(oldowan_message, ToolMessage):
+ message_ids.append(oldowan_message.tool_call_id + TOOL_RESPONSE_ID_SUFFIX)
+
+ return message_ids
+
+# This will eventually become adk.providers.oldowan.stream_agent_async_auto_send
+async def stream_oldowan_agent_async_auto_send(messages: List[Message], task_id: str, span: Span, simple_agent_acompletion_fn: Callable) -> List[Message]:
+ """
+ Stream an Oldowan agent response to the client.
+ Args:
+ messages: The messages to send to the agent.
+ task_id: The task id.
+ span: The span to use for tracing.
+ Returns:
+ AsyncGenerator[TaskMessageUpdate, None]: A generator of task message updates.
+ """
+ response_stream = await simple_agent_acompletion_fn(messages=messages)
+
+ # This is used to create the current TaskMessage object
+ cur_task_message_id = None
+
+ # This maps id either from message object, tool_call, or tool_response to the TaskMessage object
+ id_to_streaming_context = {}
+
+ # These are messages that have already been sent in "full"
+ persisted_messages = []
+ events = []
+
+ # These are ChoiceDelta objects
+ async for event in response_stream:
+ print(event)
+ if event.role is not None:
+ # if there is a tool call made then check if its a new tool_call_id
+ if event.tool_calls is not None and event.tool_calls[0].id is not None and event.tool_calls[0].id not in id_to_streaming_context:
+ print(f"Role changed: {event.role}")
+ print(f"Tool call id changed: {event.tool_calls[0].id}")
+ cur_task_message_id = event.tool_calls[0].id
+ streaming_context = adk.streaming.streaming_task_message_context(
+ task_id=task_id,
+ initial_content=convert_choice_delta_to_message_content(event),
+ )
+ id_to_streaming_context[event.tool_calls[0].id] = await streaming_context.open()
+ print(f"Created streaming context for tool call: {id_to_streaming_context[event.tool_calls[0].id].task_message}")
+
+
+ # If you are in a tool response, you should check that either the tool_call_id has changed or your last type was not tool
+ elif event.role == "tool" and (event.tool_call_id + TOOL_RESPONSE_ID_SUFFIX not in id_to_streaming_context):
+ print(f"Role changed: {event.role}")
+ print(f"Tool Response id: {event.tool_call_id + TOOL_RESPONSE_ID_SUFFIX}")
+ cur_task_message_id = event.tool_call_id + TOOL_RESPONSE_ID_SUFFIX
+ streaming_context = adk.streaming.streaming_task_message_context(
+ task_id=task_id,
+ initial_content=convert_choice_delta_to_message_content(event),
+ )
+ id_to_streaming_context[event.tool_call_id + TOOL_RESPONSE_ID_SUFFIX] = await streaming_context.open()
+ print(f"Created streaming context for tool response: {id_to_streaming_context[event.tool_call_id + TOOL_RESPONSE_ID_SUFFIX].task_message}")
+
+
+ elif event.role == "assistant" and event.content is not None and event.id not in id_to_streaming_context: # this is an assistant message
+ print(f"Role is: {event.role}")
+ assert hasattr(event, "id"), "Event does not have an id, please upgrade to latest oldowan"
+ print(f"Event id: {event.id}")
+ cur_task_message_id = event.id
+ streaming_context = adk.streaming.streaming_task_message_context(
+ task_id=task_id,
+ initial_content=convert_choice_delta_to_message_content(event),
+ )
+ id_to_streaming_context[event.id] = await streaming_context.open()
+ print(f"Created streaming context for assistant message: {id_to_streaming_context[event.id].task_message}")
+
+
+
+ # Now we can create the items to stream
+ # NOTE: key assumption is that ChoiceDeltaToolCall can only apply to one tool call at a time.
+ for task_message_delta in convert_choice_delta_to_stream_task_message_deltas(event, parent_task_message=id_to_streaming_context[cur_task_message_id].task_message):
+ streaming_context = id_to_streaming_context[cur_task_message_id]
+ await streaming_context.stream_update(
+ update=task_message_delta,
+ )
+
+ events.append(event)
+
+ # Issue is that we can either have an oldowan message before a task message has been created OR task message before the oldowan message
+ # this is because tool response messages are added to messages immediately, but streamed one after the other.
+ # For each oldowan message, if we haven't persisted it yet, then do so
+ for idx, oldowan_message in enumerate(response_stream.messages):
+ if oldowan_message not in persisted_messages and all([id in id_to_streaming_context for id in get_oldowan_message_ids(oldowan_message)]):
+ async with adk.tracing.span(
+ trace_id=task_id,
+ parent_id=span.id,
+ name=f"Message {idx}",
+ input=messages + response_stream.messages[:idx], # input messages to this message
+ ) as message_span:
+ message_span.output = oldowan_message
+
+ # Send the full messages now that they are done
+ await convert_oldowan_message_to_stream_task_message_full(
+ id_to_streaming_context=id_to_streaming_context,
+ oldowan_message=oldowan_message
+ )
+
+ print(f"Persisted message: {oldowan_message}")
+ persisted_messages.append(oldowan_message)
+
+ # Stream the last object
+ async with adk.tracing.span(
+ trace_id=task_id,
+ parent_id=span.id,
+ name=f"Message {len(response_stream.messages)}",
+ input=messages + response_stream.messages[:-1],
+ ) as message_span:
+ message_span.output = response_stream.messages[-1]
+
+ # Persist the last message to the DB
+ await convert_oldowan_message_to_stream_task_message_full(
+ id_to_streaming_context=id_to_streaming_context,
+ oldowan_message=response_stream.messages[-1]
+ )
+ print(f"Persisted message: {response_stream.messages[-1]}")
+ persisted_messages.append(response_stream.messages[-1])
+
+ # Close all the streaming contexts
+ for streaming_context in id_to_streaming_context.values():
+ if not streaming_context._is_closed:
+ print(f"Closing streaming context for message ID: {streaming_context.task_message.id}. Is closed: {streaming_context._is_closed}")
+ await streaming_context.close()
+
+ # Aggregate the messages and store the output
+ return response_stream.messages
+
+
+@acp.on_task_create
+async def handle_task_create(params: CreateTaskParams):
+ # Upon task creation, we initialize the task state with a system message.
+ # This will be fetched by the `on_task_event_send` handler when each event is sent.
+ state = StateModel(
+ simple_agent_completion_config=SimpleAgentCompletionConfig(
+ model="openai/gpt-4o",
+ tools=["google_search", "think"],
+ max_tokens=8192,
+ stream=True,
+ ),
+ messages=[],
+ turn_number=0,
+ )
+ assert all([tool in TOOL_DICT for tool in state.simple_agent_completion_config.tools]), f"Invalid tool: {state.simple_agent_completion_config.tools}"
+ await adk.state.create(task_id=params.task.id, agent_id=params.agent.id, state=state)
+
+# Note: The return of this handler is required to be persisted by the Agentex Server
+@acp.on_task_event_send
+async def handle_message_send(params: SendEventParams):
+ #########################################################
+ # 1-3. These steps are all the same as the hello acp tutorial.
+ #########################################################
+
+ if not params.event.content:
+ return
+
+ if params.event.content.type != "text":
+ raise ValueError(f"Expected text message, got {params.event.content.type}")
+
+ if params.event.content.author != "user":
+ raise ValueError(f"Expected user message, got {params.event.content.author}")
+
+ print(f"Received event: {params.event.content}")
+ await adk.messages.create(
+ task_id=params.task.id,
+ trace_id=params.task.id,
+ content=params.event.content,
+ )
+
+ # Try to retrieve the state. If it doesn't exist, create it.
+ task_state = await adk.state.get_by_task_and_agent(task_id=params.task.id, agent_id=params.agent.id)
+ state = StateModel.model_validate(task_state.state)
+
+ messages = state.messages
+
+ #########################################################
+ # 4. Call an LLM to respond to the user's message and stream the response to the client.
+ #########################################################
+ print(f"Calling LLM with model {state.simple_agent_completion_config.model_dump_json()} and messages {messages}")
+
+ # Add the user's message to the conversation history
+ state.messages.append(UserMessage(content=params.event.content.content))
+
+ # The Agentex server automatically commits input and output messages to the database so you don't need to do this yourself, simply process the input content and return the output content.
+ async with adk.tracing.span(
+ trace_id=params.task.id,
+ name=f"Turn {state.turn_number}",
+ input=state,
+ ) as span:
+ simple_agent_completion_fn = partial(
+ simple_agent_acompletion,
+ model=state.simple_agent_completion_config.model,
+ tools=[TOOL_DICT[tool] for tool in state.simple_agent_completion_config.tools],
+ max_tokens=state.simple_agent_completion_config.max_tokens,
+ stream=state.simple_agent_completion_config.stream,
+ )
+
+ # Stream the response and collect the generated messages
+ messages = await stream_oldowan_agent_async_auto_send(messages=messages, task_id=params.task.id, span=span, simple_agent_acompletion_fn=simple_agent_completion_fn)
+
+ # The generated messages are accessible from the span output
+ state.messages.extend(messages)
+
+ state.turn_number += 1
+
+ # Update the state with the new messages
+ await adk.state.update(
+ task_id=params.task.id,
+ agent_id=params.agent.id,
+ state_id=task_state.id,
+ state=state,
+ trace_id=params.task.id,
+ )
+
+@acp.on_task_cancel
+async def handle_task_cancel(params: CancelTaskParams):
+ """Default task cancel handler"""
+ logger.info(f"Task canceled: {params.task}")
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/00_base/050_hello_oldowan/requirements.txt b/examples/tutorials/10_agentic/00_base/050_hello_oldowan/requirements.txt
new file mode 100644
index 000000000..7898056cd
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/050_hello_oldowan/requirements.txt
@@ -0,0 +1,6 @@
+# AgentEx Python SDK (agentex-py)
+agentex-py>=0.0.4,<0.1.0
+
+# Scale GenAI Platform Python SDK
+scale-gp
+scale-oldowan==0.3.17
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/00_base/060_hello_egp_services/.dockerignore b/examples/tutorials/10_agentic/00_base/060_hello_egp_services/.dockerignore
new file mode 100644
index 000000000..c3620f1bc
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/060_hello_egp_services/.dockerignore
@@ -0,0 +1,46 @@
+# Python
+__pycache__/
+*.py[cod]
+*$py.class
+*.so
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# Environments
+.env**
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# IDE
+.idea/
+.vscode/
+*.swp
+*.swo
+
+# Git
+.git
+.gitignore
+
+# Misc
+.DS_Store
+
+# CodeArtifact configuration
+.codeartifact-pip-conf
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/00_base/060_hello_egp_services/Dockerfile b/examples/tutorials/10_agentic/00_base/060_hello_egp_services/Dockerfile
new file mode 100644
index 000000000..112b41a4e
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/060_hello_egp_services/Dockerfile
@@ -0,0 +1,45 @@
+# syntax=docker/dockerfile:1.3
+FROM python:3.12-slim
+COPY --from=ghcr.io/astral-sh/uv:0.6.4 /uv /uvx /bin/
+
+# Install system dependencies
+RUN apt-get update && apt-get install -y \
+ htop \
+ vim \
+ curl \
+ tar \
+ python3-dev \
+ postgresql-client \
+ build-essential \
+ libpq-dev \
+ gcc \
+ cmake \
+ netcat-openbsd \
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
+
+RUN uv pip install --system --upgrade pip setuptools wheel
+
+ENV UV_HTTP_TIMEOUT=1000
+
+# Copy just the requirements file to optimize caching
+COPY 060_hello_egp_services/requirements.txt /app/requirements.txt
+
+WORKDIR /app/
+
+# Install the required Python packages
+RUN --mount=type=secret,id=codeartifact-pip-conf,target=/etc/pip.conf \
+ export UV_INDEX_URL=$(grep -E "^index-url" /etc/pip.conf | cut -d'=' -f2- | xargs) && \
+ export UV_EXTRA_INDEX_URL=$(grep -E "^extra-index-url" /etc/pip.conf | cut -d'=' -f2- | xargs || echo "") && \
+ uv pip install --system -r requirements.txt
+
+# Copy the project code
+COPY 060_hello_egp_services/project /app/project
+
+WORKDIR /app/project
+
+# Set environment variables
+ENV PYTHONPATH=/app
+
+# Run the agent using uvicorn
+CMD ["uvicorn", "acp:acp", "--host", "0.0.0.0", "--port", "8000"]
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/00_base/060_hello_egp_services/README.md b/examples/tutorials/10_agentic/00_base/060_hello_egp_services/README.md
new file mode 100644
index 000000000..b194e9ab8
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/060_hello_egp_services/README.md
@@ -0,0 +1,17 @@
+# Hello EGP Services Agent
+
+This is a simple example agent that demonstrates the basics of the Agent 2 Client Protocol (ACP) and the AgentEx framework with an integration to EGP Services.
+
+## For Development
+Navigate to `tutorials/10_agentic/00_base/060_hello_egp_services`
+
+```bash
+# Generate CodeArtifact configuration for building (run from repo root)
+./setup-build-codeartifact.sh
+
+# Set up local development environment
+uv venv --python 3.12
+source .venv/bin/activate
+
+uv pip install -r requirements.txt --prerelease=allow
+```
diff --git a/examples/tutorials/10_agentic/00_base/060_hello_egp_services/manifest.yaml b/examples/tutorials/10_agentic/00_base/060_hello_egp_services/manifest.yaml
new file mode 100644
index 000000000..5269bc918
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/060_hello_egp_services/manifest.yaml
@@ -0,0 +1,116 @@
+# Agent Manifest Configuration
+# ---------------------------
+# This file defines how your agent should be built and deployed.
+
+# Build Configuration
+# ------------------
+# The build config defines what gets packaged into your agent's Docker image.
+# This same configuration is used whether building locally or remotely.
+#
+# When building:
+# 1. All files from include_paths are collected into a build context
+# 2. The context is filtered by dockerignore rules
+# 3. The Dockerfile uses this context to build your agent's image
+# 4. The image is pushed to a registry and used to run your agent
+build:
+ context:
+ # Root directory for the build context
+ root: ../ # Keep this as the default root
+
+ # Paths to include in the Docker build context
+ # Must include:
+ # - agentex-py: The core framework (needed for agent execution)
+ # - Your agent's directory (your custom agent code)
+ # These paths are collected and sent to the Docker daemon for building
+ include_paths:
+ - 060_hello_egp_services
+
+ # Path to your agent's Dockerfile
+ # This defines how your agent's image is built from the context
+ # Relative to the root directory
+ dockerfile: 060_hello_egp_services/Dockerfile
+
+ # Path to your agent's .dockerignore
+ # Filters unnecessary files from the build context
+ # Helps keep build context small and builds fast
+ dockerignore: 060_hello_egp_services/.dockerignore
+
+# Local Development Configuration
+# -----------------------------
+# Only used when running the agent locally
+local_development:
+ agent:
+ port: 8000 # Port where your local ACP server is running
+ host_address: host.docker.internal # Host address for Docker networking (host.docker.internal for Docker, localhost for direct)
+
+ # File paths for local development (relative to this manifest.yaml)
+ paths:
+ # Path to ACP server file
+ # Examples:
+ # project/acp.py (standard)
+ # src/server.py (custom structure)
+ # ../shared/acp.py (shared across projects)
+ # /absolute/path/acp.py (absolute path)
+ acp: project/acp.py
+
+# Agent Configuration
+# -----------------
+agent:
+ acp_type: agentic
+
+ # Unique name for your agent
+ # Used for task routing and monitoring
+ name: ab060-hello-egp-services
+
+ # Description of what your agent does
+ # Helps with documentation and discovery
+  description: An AgentEx agent that uses EGP Services to show the flexibility that agents are just code
+
+ # Temporal workflow configuration
+ # Set enabled: true to use Temporal workflows for long-running tasks
+ temporal:
+ enabled: false
+
+ # Optional: Credentials mapping
+ # Maps Kubernetes secrets to environment variables
+ # Common credentials include:
+ # credentials:
+ # - env_var_name: OPENAI_API_KEY
+ # secret_name: openai-api-key
+ # secret_key: api-key
+
+ # Optional: Set Environment variables for running your agent locally as well
+ # as for deployment later on
+ # env:
+ # - name: OPENAI_BASE_URL
+ # value: "https://api.openai.com/v1"
+ # - name: ACCOUNT_ID
+ # value: "your_account_id_here"
+
+# Deployment Configuration
+# -----------------------
+# Configuration for deploying your agent to Kubernetes clusters
+deployment:
+ # Container image configuration
+ image:
+ repository: "" # Update with your container registry
+ tag: "latest" # Default tag, should be versioned in production
+
+ # Global deployment settings that apply to all clusters
+ # These can be overridden in cluster-specific files (deploy/*.yaml)
+ global:
+ agent:
+ name: "ab060-hello-egp-services"
+      description: "An AgentEx agent that uses EGP Services to show the flexibility that agents are just code"
+
+ # Default replica count
+ replicaCount: 1
+
+ # Default resource requirements
+ resources:
+ requests:
+ cpu: "500m"
+ memory: "1Gi"
+ limits:
+ cpu: "1000m"
+ memory: "2Gi"
diff --git a/examples/tutorials/10_agentic/00_base/060_hello_egp_services/project/__init__.py b/examples/tutorials/10_agentic/00_base/060_hello_egp_services/project/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/examples/tutorials/10_agentic/00_base/060_hello_egp_services/project/acp.py b/examples/tutorials/10_agentic/00_base/060_hello_egp_services/project/acp.py
new file mode 100644
index 000000000..93e4830ea
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/060_hello_egp_services/project/acp.py
@@ -0,0 +1,264 @@
+import os
+import json
+import logging
+from datetime import datetime
+from jinja2 import Template
+from typing import List, Union
+
+from agentex.lib import adk
+from agentex.lib.sdk.fastacp.fastacp import FastACP
+from agentex.lib.types.acp import SendEventParams, CreateTaskParams, CancelTaskParams
+from agentex.lib.types.fastacp import AgenticACPConfig
+from agentex.lib.utils.model_utils import BaseModel
+from agentex.lib.types.llm_messages import Message, UserMessage, SystemMessage
+from agentex.types.text_content import TextContent
+from agentex.types.task_message_content import TaskMessageContent, ToolRequestContent, ToolResponseContent
+
+from egp_services.nodes import ToolGenerationNode, RetrieverNode, ChatGenerationNode
+from egp_services.nodes.generation.tool_generation import ToolConfig
+from oldowan.completions import ToolMessage, ChatCompletionMessage
+
+logger = logging.getLogger(__name__)
+
+assert os.environ.get("SGP_API_KEY") is not None, "SGP_API_KEY is not set"
+assert os.environ.get("SGP_ACCOUNT_ID") is not None, "SGP_ACCOUNT_ID is not set"
+
+# Create an ACP server
+acp = FastACP.create(
+ acp_type="agentic",
+ config=AgenticACPConfig(type="base"),
+)
+
+
+
+TOOL_GENERATION_NODE = ToolGenerationNode(
+ model="openai/gpt-4o",
+ tools=[
+ ToolConfig(name="nodes.ChatGenerationNode", init_kwargs={'llm_model' : 'anthropic/claude-3-7-sonnet-20250219'}),
+ ToolConfig(name="internal.google_search"),
+ ],
+ client_kwargs={"api_key": os.environ.get("SGP_API_KEY"), "account_id": os.environ.get("SGP_ACCOUNT_ID")}
+)
+
+CHAT_GENERATION_NODE = ChatGenerationNode(
+ model="openai/gpt-4o",
+ client_kwargs={"api_key": os.environ.get("SGP_API_KEY"), "account_id": os.environ.get("SGP_ACCOUNT_ID")}
+)
+
+RETRIEVER_NODE = RetrieverNode(
+ knowledge_base_id="bb9095d0-a93a-4353-a9f5-44c54d0060ac",
+ client_kwargs={"api_key": os.environ.get("SGP_API_KEY"), "account_id": os.environ.get("SGP_ACCOUNT_ID")}
+)
+
+CREATE_RETRIEVAL_QUERY_USER_PROMPT = """
+You are a helpful assistant that creates a retrieval query for a knowledge base based on the current state of the conversation.
+
+Here is the current state of the conversation:
+
+{% for message in messages %}
+{{ message.role }}: {{ message.content }}
+{% endfor %}
+
+Now create a retrieval query for the knowledge base.
+"""
+
+TOOL_ENABLED_ASSISTANT_SYSTEM_PROMPT = """
+You are a helpful assistant that uses tools to answer questions.
+
+Here is some context for the conversation:
+
+{% for chunk in chunks %}
+
+Chunk ID: {{ chunk.chunk_id }}
+
+{{ chunk.text }}
+
+{% endfor %}
+
+Good luck!
+"""
+
+TOOL_RESPONSE_ID_SUFFIX = "_response"
+
+class StateModel(BaseModel):
+ turn_number: int # The number of turns the agent has taken
+ messages: List[Message] # The messages the agent has seen
+
+# Converts an EGP services message (chat completion or tool message) into a list of AgentEx task message contents
+def convert_message_to_task_message(message: Union[ChatCompletionMessage, ToolMessage]) -> List[TaskMessageContent]:
+ task_messages = []
+ if isinstance(message, ChatCompletionMessage):
+ # Always return the ChatCompletionMessage first
+ if message.content is not None:
+ task_messages.append(TextContent(
+ author="agent",
+ content=message.content,
+ ))
+
+ # Then add on the tool calls
+ if message.tool_calls is not None:
+ for tool_call in message.tool_calls:
+ task_messages.append(ToolRequestContent(
+ author="agent",
+ name=tool_call.function.name,
+ arguments=json.loads(tool_call.function.arguments),
+ ))
+
+    # Finally, add the tool response
+ elif isinstance(message, ToolMessage):
+ task_messages.append(ToolResponseContent(
+ author="agent",
+ content=message.content,
+ name=message.name,
+ ))
+ return task_messages
+
+
+async def handle_turn(task_id: str, state: StateModel, content: str):
+ """Shared function for handling a turn in the task"""
+ # Echo back the user's initial message
+ await adk.messages.create(
+ task_id=task_id,
+ content=TextContent(
+ author="user",
+ content=content,
+ ),
+ trace_id=task_id,
+ )
+
+ # Add the user's message to the state
+ state.messages.append(UserMessage(content=content))
+
+ # Create a span for the entire turn
+ async with adk.tracing.span(
+ trace_id=task_id,
+ name=f"Turn {state.turn_number}",
+ input=state,
+ ) as span:
+ # 1. Summarize the current state
+ retrieval_query_messages = [
+ UserMessage(content=Template(CREATE_RETRIEVAL_QUERY_USER_PROMPT).render(messages=state.messages)),
+ ]
+ async with adk.tracing.span(
+ trace_id=task_id,
+ name=f"Create Retrieval Query",
+ parent_id=span.id,
+ input={"retrieval_query_messages": retrieval_query_messages},
+ ) as retrieval_query_span:
+ retrieval_query = CHAT_GENERATION_NODE(
+ messages=retrieval_query_messages,
+ )
+ retrieval_query_span.end_time = datetime.now()
+ retrieval_query_span.output = {"retrieval_query": retrieval_query}
+
+ print(f"Retrieval query about to be sent: {retrieval_query} - class: {type(retrieval_query)} - class name: {type(retrieval_query).__name__}")
+
+ # 2. Do a retrieval function
+ async with adk.tracing.span(
+ trace_id=task_id,
+ name=f"Retrieve Chunks",
+ parent_id=span.id,
+ input={"retrieval_query": retrieval_query},
+ ) as retrieve_chunks_span:
+ chunks = RETRIEVER_NODE(query=retrieval_query.output, num_to_return=2)
+ retrieve_chunks_span.end_time = datetime.now()
+ retrieve_chunks_span.output = {"chunks": chunks}
+
+
+ # 3. Do a tool enabled generation
+ tool_enabled_llm_messages = [
+ SystemMessage(content=Template(TOOL_ENABLED_ASSISTANT_SYSTEM_PROMPT).render(chunks=chunks)),
+ *state.messages,
+ ]
+ # Trace the full node
+ async with adk.tracing.span(
+ trace_id=task_id,
+ name=f"Generate Response",
+ parent_id=span.id,
+ input={"tool_enabled_llm_messages": tool_enabled_llm_messages},
+ ) as generate_response_span:
+ messages = await TOOL_GENERATION_NODE.async_call(
+ messages=tool_enabled_llm_messages,
+ )
+
+ # For each message, trace it and send it to the client
+ for idx, message in enumerate(messages):
+ async with adk.tracing.span(
+ trace_id=task_id,
+ name=f"Message {idx}",
+ parent_id=generate_response_span.id,
+ input={"messages": tool_enabled_llm_messages + messages[:idx]},
+ ) as message_span:
+ task_messages = convert_message_to_task_message(message)
+ for task_message in task_messages:
+ await adk.messages.create(
+ task_id=task_id,
+ content=task_message,
+ trace_id=task_id,
+ parent_span_id=message_span.id,
+ )
+
+ message_span.output = {"message": message}
+
+ generate_response_span.end_time = datetime.now()
+ generate_response_span.output = {"messages": messages}
+
+ # Update the task state with the new messages
+ state.messages.extend(messages)
+ state.turn_number += 1
+
+ span.end_time = datetime.now()
+
+ return state
+
+
+@acp.on_task_create
+async def handle_task_create(params: CreateTaskParams):
+ # Upon task creation, we initialize the task state with a system message.
+ # This will be fetched by the `on_task_event_send` handler when each event is sent.
+ state = StateModel(
+ messages=[],
+ turn_number=0,
+ )
+ await adk.state.create(task_id=params.task.id, agent_id=params.agent.id, state=state)
+
+
+# Note: The return of this handler is required to be persisted by the Agentex Server
+@acp.on_task_event_send
+async def handle_message_send(params: SendEventParams):
+ #########################################################
+ # 1-3. These steps are all the same as the hello acp tutorial.
+ #########################################################
+
+ if not params.event.content:
+ return
+
+ if params.event.content.type != "text":
+ raise ValueError(f"Expected text message, got {params.event.content.type}")
+
+ if params.event.content.author != "user":
+ raise ValueError(f"Expected user message, got {params.event.content.author}")
+
+ # Try to retrieve the state. If it doesn't exist, create it.
+ task_state = await adk.state.get_by_task_and_agent(task_id=params.task.id, agent_id=params.agent.id)
+ state = StateModel.model_validate(task_state.state)
+
+ #########################################################
+ # 4. Call an LLM to respond to the user's message and stream the response to the client.
+ #########################################################
+
+ state = await handle_turn(task_id=params.task.id, state=state, content=params.event.content.content)
+
+ # Update the state with the new messages
+ await adk.state.update(
+ task_id=params.task.id,
+ agent_id=params.agent.id,
+ state_id=task_state.id,
+ state=state,
+ trace_id=params.task.id,
+ )
+
+@acp.on_task_cancel
+async def handle_task_cancel(params: CancelTaskParams):
+ """Default task cancel handler"""
+ logger.info(f"Task canceled: {params.task}")
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/00_base/060_hello_egp_services/requirements.txt b/examples/tutorials/10_agentic/00_base/060_hello_egp_services/requirements.txt
new file mode 100644
index 000000000..0cd249f11
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/060_hello_egp_services/requirements.txt
@@ -0,0 +1,7 @@
+# AgentEx Python SDK (agentex-py)
+agentex-py>=0.0.4,<0.1.0
+
+# Scale GenAI Platform Python SDK
+scale-gp
+egp-services
+openai-agents!=0.0.17
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/00_base/080_batch_events/.dockerignore b/examples/tutorials/10_agentic/00_base/080_batch_events/.dockerignore
new file mode 100644
index 000000000..c4f7a8b4b
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/080_batch_events/.dockerignore
@@ -0,0 +1,43 @@
+# Python
+__pycache__/
+*.py[cod]
+*$py.class
+*.so
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# Environments
+.env**
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# IDE
+.idea/
+.vscode/
+*.swp
+*.swo
+
+# Git
+.git
+.gitignore
+
+# Misc
+.DS_Store
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/00_base/080_batch_events/Dockerfile b/examples/tutorials/10_agentic/00_base/080_batch_events/Dockerfile
new file mode 100644
index 000000000..1c5138b5e
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/080_batch_events/Dockerfile
@@ -0,0 +1,45 @@
+# syntax=docker/dockerfile:1.3
+FROM python:3.12-slim
+COPY --from=ghcr.io/astral-sh/uv:0.6.4 /uv /uvx /bin/
+
+# Install system dependencies
+RUN apt-get update && apt-get install -y \
+ htop \
+ vim \
+ curl \
+ tar \
+ python3-dev \
+ postgresql-client \
+ build-essential \
+ libpq-dev \
+ gcc \
+ cmake \
+ netcat-openbsd \
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
+
+RUN uv pip install --system --upgrade pip setuptools wheel
+
+ENV UV_HTTP_TIMEOUT=1000
+
+# Copy just the requirements file to optimize caching
+COPY 10_agentic/00_base/080_batch_events/requirements.txt /app/10_agentic/00_base/080_batch_events/requirements.txt
+
+WORKDIR /app/10_agentic/00_base/080_batch_events
+
+# Install the required Python packages
+RUN --mount=type=secret,id=codeartifact-pip-conf,target=/etc/pip.conf \
+ export UV_INDEX_URL=$(grep -E "^index-url" /etc/pip.conf | cut -d'=' -f2- | xargs) && \
+ export UV_EXTRA_INDEX_URL=$(grep -E "^extra-index-url" /etc/pip.conf | cut -d'=' -f2- | xargs || echo "") && \
+ uv pip install --system -r requirements.txt
+
+# Copy the project code
+COPY 10_agentic/00_base/080_batch_events/project /app/10_agentic/00_base/080_batch_events/project
+
+WORKDIR /app/10_agentic/00_base/080_batch_events/project
+
+# Set environment variables
+ENV PYTHONPATH=/app
+
+# Run the agent using uvicorn
+CMD ["uvicorn", "acp:acp", "--host", "0.0.0.0", "--port", "8000"]
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/00_base/080_batch_events/README.md b/examples/tutorials/10_agentic/00_base/080_batch_events/README.md
new file mode 100644
index 000000000..a6058fb20
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/080_batch_events/README.md
@@ -0,0 +1,174 @@
+# ab080-batch-events - AgentEx Starter Template
+
+This is a tutorial demonstrating **batch event processing** and the **limitations of the base agentic ACP protocol**.
+
+## 🎯 Tutorial Purpose
+
+This tutorial demonstrates three key concepts:
+
+1. **Events and Cursor Usage**: How to use `adk.events.list_events()` with `last_processed_event_id` to track processing progress
+2. **Cursor Committing**: How to "commit" the cursor by updating `last_processed_event_id` in the AgentTaskTracker
+3. **Base ACP Limitations**: Real-world limitations when building distributed agents with the basic agentic ACP protocol
+
+## ⚠️ Important Limitations
+
+### **Primary Limitation (Race Conditions)**
+The code includes this critical limitation:
+```python
+# LIMITATION - because this is not atomic, it is possible that two different
+# processes will read the value of READY and then both will try to set it to
+# PROCESSING. The only way to prevent this is locking, which is not supported
+# by the agentex server.
+```
+
+**Problem**: Multiple pods can simultaneously check status=READY and both proceed to process events, leading to duplicate work.
+
+### **Additional Distributed System Limitations**
+
+1. **Server Crash Recovery**: If the agent server dies while processing events, there's no clean way to restart processing from where it left off. The status remains "PROCESSING" indefinitely.
+
+2. **Cursor Commit Failures**: If the server fails to commit the cursor (`last_processed_event_id`) after writing a message, retrying will lead to duplicate messages being written for the same events.
+
+3. **No Transactional Guarantees**: There's no way to atomically update both the message store and the cursor position, leading to potential inconsistencies.
+
+4. **Base ACP Protocol Constraints**: These issues cannot be solved with the simple agentic base ACP protocol alone - they require more sophisticated coordination mechanisms.
+
+## 🔧 Solutions
+
+The limitations above highlight why more advanced patterns are needed for production systems:
+
+**Options for Production**:
+1. **Database Locking**: Implement your own database locking mechanism and provide the agent with database credentials
+2. **Temporal Workflows**: Use Temporal to ensure only one workflow execution processes events at a time (eliminates the need for manual locking)
+3. **Message Queues**: Use external queue systems with built-in exactly-once delivery guarantees
+
+## 🎯 Batching Demonstration
+
+Despite the limitations, this tutorial effectively demonstrates **event batching behavior**:
+
+- Events arriving during the 2-second processing delay get queued
+- When processing completes, all queued events are processed together in the next batch
+- This shows how slow agents can efficiently handle bursts of events
+
+## What You'll Learn
+
+- **Tasks**: A task is a grouping mechanism for related messages. Think of it as a conversation thread or a session.
+- **Messages**: Messages are communication objects within a task. They can contain text, data, or instructions.
+- **ACP Events**: The agent responds to four main events:
+ - `task_received`: When a new task is created
+ - `task_message_received`: When a message is sent within a task
+ - `task_approved`: When a task is approved
+ - `task_canceled`: When a task is canceled
+
+## Running the Agent
+
+1. Run the agent locally:
+```bash
+agentex agents run --manifest manifest.yaml
+```
+
+The agent will start on port 8000 and print messages whenever it receives any of the ACP events.
+
+## What's Inside
+
+This template:
+- Sets up a basic ACP server
+- Handles each of the required ACP events with simple print statements
+- Provides a foundation for building more complex agents
+
+## Next Steps
+
+For more advanced agent development, check out the AgentEx tutorials:
+
+- **Tutorials 00-08**: Learn about building synchronous agents with ACP
+- **Tutorials 09-10**: Learn how to use Temporal to power asynchronous agents
+ - Tutorial 09: Basic Temporal workflow setup
+ - Tutorial 10: Advanced Temporal patterns and best practices
+
+These tutorials will help you understand:
+- How to handle long-running tasks
+- Implementing state machines
+- Managing complex workflows
+- Best practices for async agent development
+
+## The Manifest File
+
+The `manifest.yaml` file is your agent's configuration file. It defines:
+- How your agent should be built and packaged
+- What files are included in your agent's Docker image
+- Your agent's name and description
+- Local development settings (like the port your agent runs on)
+
+This file is essential for both local development and deployment of your agent.
+
+## Project Structure
+
+```
+080_batch_events/
+├── project/ # Your agent's code
+│ ├── __init__.py
+│ └── acp.py # ACP server and event handlers
+├── Dockerfile # Container definition
+├── manifest.yaml # Deployment config
+└── requirements.txt # Dependencies
+```
+
+## Development
+
+1. **Customize Event Handlers**
+ - Modify the handlers in `acp.py` to implement your agent's logic
+ - Add your own tools and capabilities
+ - Implement custom state management
+
+2. **Add Dependencies**
+ - Add required packages to `requirements.txt`
+ - Update the manifest with any needed credentials
+
+## Local Development
+
+1. **Install AgentEx**
+```bash
+cd agentex-py
+uv venv
+source .venv/bin/activate
+uv sync
+```
+
+2. **Start the Agentex Server**
+```bash
+# Navigate to the backend directory
+cd agentex
+
+# Start all services using Docker Compose
+make dev
+
+# Optional: In a separate terminal, use lazydocker for a better UI (everything should say "healthy")
+lzd
+```
+
+3. **Run your agent**
+```bash
+# From this directory
+export ENVIRONMENT=development && agentex agents run --manifest manifest.yaml
+```
+
+4. **Interact with your agent**
+
+Option 0: CLI (deprecated - to be replaced once a new CLI is implemented - please use the web UI for now!)
+```bash
+# Submit a task via CLI
+agentex tasks submit --agent {{ agent_name }} --task "Your task here"
+```
+
+Option 1: Web UI
+```bash
+# Start the local web interface
+cd agentex-web
+make dev
+
+# Then open http://localhost:3000 in your browser to chat with your agent
+```
+## Development Tips
+
+1. **Local Testing**
+- Set environment variables in project/.env for any required credentials
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/00_base/080_batch_events/manifest.yaml b/examples/tutorials/10_agentic/00_base/080_batch_events/manifest.yaml
new file mode 100644
index 000000000..857576875
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/080_batch_events/manifest.yaml
@@ -0,0 +1,118 @@
+# Agent Manifest Configuration
+# ---------------------------
+# This file defines how your agent should be built and deployed.
+
+# Build Configuration
+# ------------------
+# The build config defines what gets packaged into your agent's Docker image.
+# This same configuration is used whether building locally or remotely.
+#
+# When building:
+# 1. All files from include_paths are collected into a build context
+# 2. The context is filtered by dockerignore rules
+# 3. The Dockerfile uses this context to build your agent's image
+# 4. The image is pushed to a registry and used to run your agent
+build:
+ context:
+ # Root directory for the build context
+ root: ../../../ # Keep this as the default root
+
+ # Paths to include in the Docker build context
+ # Must include:
+ # - agentex-py: The core framework (needed for agent execution)
+ # - Your agent's directory (your custom agent code)
+ # These paths are collected and sent to the Docker daemon for building
+ include_paths:
+ - agentex-py
+ - 10_agentic/00_base/080_batch_events
+
+ # Path to your agent's Dockerfile
+ # This defines how your agent's image is built from the context
+ # Relative to the root directory
+ dockerfile: 10_agentic/00_base/080_batch_events/Dockerfile
+
+ # Path to your agent's .dockerignore
+ # Filters unnecessary files from the build context
+ # Helps keep build context small and builds fast
+ dockerignore: 10_agentic/00_base/080_batch_events/.dockerignore
+
+
+# Local Development Configuration
+# -----------------------------
+# Only used when running the agent locally
+local_development:
+ agent:
+ port: 8000 # Port where your local ACP server is running
+ host_address: host.docker.internal # Host address for Docker networking (host.docker.internal for Docker, localhost for direct)
+
+ # File paths for local development (relative to this manifest.yaml)
+ paths:
+ # Path to ACP server file
+ # Examples:
+ # project/acp.py (standard)
+ # src/server.py (custom structure)
+ # ../shared/acp.py (shared across projects)
+ # /absolute/path/acp.py (absolute path)
+ acp: project/acp.py
+
+
+# Agent Configuration
+# -----------------
+agent:
+ acp_type: agentic
+
+ # Unique name for your agent
+ # Used for task routing and monitoring
+ name: ab080-batch-events
+
+ # Description of what your agent does
+ # Helps with documentation and discovery
+ description: An AgentEx agent
+
+ # Temporal workflow configuration
+ # Set enabled: true to use Temporal workflows for long-running tasks
+ temporal:
+ enabled: false
+
+ # Optional: Credentials mapping
+ # Maps Kubernetes secrets to environment variables
+ # Common credentials include:
+ # credentials:
+ # - env_var_name: OPENAI_API_KEY
+ # secret_name: openai-api-key
+ # secret_key: api-key
+
+ # Optional: Set Environment variables for running your agent locally as well
+ # as for deployment later on
+ # env:
+ # OPENAI_API_KEY: ""
+ # OPENAI_BASE_URL: ""
+ # OPENAI_ORG_ID: ""
+
+# Deployment Configuration
+# -----------------------
+# Configuration for deploying your agent to Kubernetes clusters
+deployment:
+ # Container image configuration
+ image:
+ repository: "" # Update with your container registry
+ tag: "latest" # Default tag, should be versioned in production
+
+ # Global deployment settings that apply to all clusters
+ # These can be overridden in cluster-specific files (deploy/*.yaml)
+ global:
+ agent:
+ name: "ab080-batch-events"
+ description: "An AgentEx agent"
+
+ # Default replica count
+ replicaCount: 1
+
+ # Default resource requirements
+ resources:
+ requests:
+ cpu: "500m"
+ memory: "1Gi"
+ limits:
+ cpu: "1000m"
+ memory: "2Gi"
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/00_base/080_batch_events/project/__init__.py b/examples/tutorials/10_agentic/00_base/080_batch_events/project/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/examples/tutorials/10_agentic/00_base/080_batch_events/project/acp.py b/examples/tutorials/10_agentic/00_base/080_batch_events/project/acp.py
new file mode 100644
index 000000000..eb72d68d2
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/080_batch_events/project/acp.py
@@ -0,0 +1,237 @@
+"""
+WARNING: This tutorial is NOT production ready. It is meant as a demonstration of how to handle a large volume of events in an agentic ACP.
+
+There are many limitations with trying to do something similar to this. Please see the README.md for more details.
+"""
+import asyncio
+from enum import Enum
+
+from agentex.lib import adk
+from agentex.lib.sdk.fastacp.fastacp import FastACP
+from agentex.lib.types.acp import SendEventParams, CancelTaskParams, CreateTaskParams
+from agentex.lib.types.fastacp import AgenticACPConfig
+from agentex.lib.utils.logging import make_logger
+from agentex.types.text_content import TextContent
+
+logger = make_logger(__name__)
+
+
+class TaskCancelledError(Exception):
+ pass
+
+
+class Status(Enum):
+ PROCESSING = "processing"
+ READY = "ready"
+ CANCELLED = "cancelled"
+
+
+# Create an ACP server
+acp = FastACP.create(
+ acp_type="agentic",
+ config=AgenticACPConfig(type="base")
+)
+
+async def process_events_batch(events, task_id: str) -> str:
+ """
+ Process a batch of events with 2s sleep per event to simulate work.
+ Returns the ID of the last processed event.
+ """
+ if not events:
+ return None
+
+ logger.info(f"🔄 Processing {len(events)} events: {[e.id for e in events]}")
+
+    # Sleep for 5s per event to simulate processing work
+ for event in events:
+ await asyncio.sleep(5)
+ logger.info(f" INSIDE PROCESSING LOOP - FINISHED PROCESSING EVENT {event.id}")
+
+ # Create message showing what was processed
+ event_ids = [event.id for event in events]
+ message_content = TextContent(
+ author="agent",
+ content=f"Processed event IDs: {event_ids}"
+ )
+
+ await adk.messages.create(
+ task_id=task_id,
+ content=message_content
+ )
+
+ final_cursor = events[-1].id
+ logger.info(f"📝 Message created for {len(events)} events (cursor: {final_cursor})")
+ return final_cursor
+
+
+@acp.on_task_create
+async def handle_task_create(params: CreateTaskParams) -> None:
+ # For this tutorial, we print the parameters sent to the handler
+ # so you can see where and how task creation is handled
+
+ logger.info(f"Task created: {params.task.id} for agent: {params.agent.id}")
+
+ # The AgentTaskTracker is automatically created by the server when a task is created
+ # Let's verify it exists and log its initial state
+ try:
+ tracker = await adk.agent_task_tracker.get_by_task_and_agent(
+ task_id=params.task.id,
+ agent_id=params.agent.id
+ )
+ logger.info(f"AgentTaskTracker found: {tracker.id}, status: {tracker.status}, last_processed_event_id: {tracker.last_processed_event_id}")
+ except Exception as e:
+ logger.error(f"Error getting AgentTaskTracker: {e}")
+
+ logger.info("Task creation complete")
+ return
+
+
+@acp.on_task_event_send
+async def handle_task_event_send(params: SendEventParams) -> None:
+ """
+ NOTE: See the README.md for a set of limitations as to why this is not the best way to handle events.
+
+ Handle incoming events with batching behavior.
+
+ Demonstrates how events arriving during PROCESSING get queued and batched:
+ 1. Check status - skip if CANCELLED or already PROCESSING
+ 2. Set status to PROCESSING
+ 3. Process events in batches until no more arrive
+ 4. Set status back to READY
+
+    The key insight: while this agent is sleeping 5s per event, new events
+ can arrive and will be batched together in the next processing cycle.
+ """
+ logger.info(f"📥 Received event: {params.event.id}")
+
+ # Get the current AgentTaskTracker state
+ try:
+ tracker = await adk.agent_task_tracker.get_by_task_and_agent(
+ task_id=params.task.id,
+ agent_id=params.agent.id
+ )
+ logger.info(f"Current tracker status: {tracker.status}, cursor: {tracker.last_processed_event_id}")
+ except Exception as e:
+ logger.error(f"Error getting AgentTaskTracker: {e}")
+ return
+
+ # Skip if task is cancelled
+ if tracker.status == Status.CANCELLED.value:
+ logger.error("❌ Task is cancelled. Skipping.")
+ return
+
+ # Skip if already processing (another pod is handling it)
+ if tracker.status == Status.PROCESSING.value:
+ logger.info("⏭️ Task is already being processed by another pod. Skipping.")
+ return
+
+    # LIMITATION - because this is not atomic, it is possible that two different processes will both read the READY status
+ # and then both will try to set it to processing. The only way to prevent this is locking, which is not supported
+ # by the agentex server.
+ #
+ # Options:
+ # 1. Implement your own database locking mechanism and provide the agent with the credentials to the database
+ # 2. Use Temporal, which will ensure that there is only one workflow execution to be processing at a time (thus not needing a lock anymore)
+ # Update status to PROCESSING to claim this processing cycle
+ try:
+ tracker = await adk.agent_task_tracker.update(
+ tracker_id=tracker.id,
+ status=Status.PROCESSING.value,
+ status_reason="Processing events in batches"
+
+ )
+ logger.info(f"🔒 Set status to PROCESSING")
+ except Exception as e:
+ logger.error(f"❌ Failed to set status to PROCESSING (another pod may have claimed it): {e}")
+ return
+
+ reset_to_ready = True
+ try:
+ current_cursor = tracker.last_processed_event_id
+ # Main processing loop - keep going until no more new events
+ while True:
+ print(f"\n🔍 Checking for new events since cursor: {current_cursor}")
+
+ tracker = await adk.agent_task_tracker.get(tracker_id=tracker.id)
+ if tracker.status == Status.CANCELLED.value:
+ logger.error("❌ Task is cancelled. Skipping.")
+ raise TaskCancelledError("Task is cancelled")
+
+ # Get all new events since current cursor
+ try:
+ print("Listing events since cursor: ", current_cursor)
+ new_events = await adk.events.list_events(
+ task_id=params.task.id,
+ agent_id=params.agent.id,
+ last_processed_event_id=current_cursor,
+ limit=100
+ )
+
+ if not new_events:
+ print("✅ No more new events found - processing cycle complete")
+ break
+
+ logger.info(f"🎯 BATCH: Found {len(new_events)} events to process")
+
+ except Exception as e:
+ logger.error(f"❌ Error collecting events: {e}")
+ break
+
+ # Process this batch of events (with 2s sleeps)
+ try:
+ final_cursor = await process_events_batch(new_events, params.task.id)
+
+ # Update cursor to mark these events as processed
+ await adk.agent_task_tracker.update(
+ tracker_id=tracker.id,
+ last_processed_event_id=final_cursor,
+ status=Status.PROCESSING.value, # Still processing, might be more
+ status_reason=f"Processed batch of {len(new_events)} events"
+ )
+
+ current_cursor = final_cursor
+ logger.info(f"📊 Updated cursor to: {current_cursor}")
+
+ except Exception as e:
+ logger.error(f"❌ Error processing events batch: {e}")
+ break
+ except TaskCancelledError as e:
+ logger.error(f"❌ Task cancelled: {e}")
+ reset_to_ready = False
+ finally:
+ if not reset_to_ready:
+ return
+
+ # Always set status back to READY when done processing
+ try:
+ await adk.agent_task_tracker.update(
+ tracker_id=tracker.id,
+ status=Status.READY.value,
+ status_reason="Completed event processing - ready for new events"
+ )
+ logger.info(f"🟢 Set status back to READY - agent available for new events")
+ except Exception as e:
+ logger.error(f"❌ Error setting status back to READY: {e}")
+
+
+@acp.on_task_cancel
+async def handle_task_canceled(params: CancelTaskParams):
+ # For this tutorial, we print the parameters sent to the handler
+ # so you can see where and how task cancellation is handled
+ logger.info(f"Hello world! Task canceled: {params.task.id}")
+
+ # Update the AgentTaskTracker to reflect cancellation
+ try:
+ tracker = await adk.agent_task_tracker.get_by_task_and_agent(
+ task_id=params.task.id,
+ agent_id=params.agent.id
+ )
+ await adk.agent_task_tracker.update(
+ tracker_id=tracker.id,
+ status=Status.CANCELLED.value,
+ status_reason="Task was cancelled by user"
+ )
+ logger.info(f"Updated tracker status to cancelled")
+ except Exception as e:
+ logger.error(f"Error updating tracker on cancellation: {e}")
+
diff --git a/examples/tutorials/10_agentic/00_base/080_batch_events/requirements.txt b/examples/tutorials/10_agentic/00_base/080_batch_events/requirements.txt
new file mode 100644
index 000000000..2c2ebf15b
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/080_batch_events/requirements.txt
@@ -0,0 +1,5 @@
+# Install agentex-py from local path
+agentex-py>=0.0.4,<0.1.0
+
+# Scale GenAI Platform Python SDK
+scale-gp
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/00_base/080_batch_events/test_batch_events.py b/examples/tutorials/10_agentic/00_base/080_batch_events/test_batch_events.py
new file mode 100644
index 000000000..1763fa2cc
--- /dev/null
+++ b/examples/tutorials/10_agentic/00_base/080_batch_events/test_batch_events.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python3
+"""
+Simple script to test agent RPC endpoints using the actual schemas.
+"""
+
+import httpx
+import json
+import uuid
+import asyncio
+
+# Configuration
+BASE_URL = "http://localhost:5003"
+AGENT_ID = "b4f32d71-ff69-4ac9-84d1-eb2937fea0c7"
+RPC_ENDPOINT = f"{BASE_URL}/agents/{AGENT_ID}/rpc"
+
+async def send_rpc_request(method: str, params: dict):
+ """Send an RPC request to the agent."""
+ request_data = {
+ "jsonrpc": "2.0",
+ "id": str(uuid.uuid4()),
+ "method": method,
+ "params": params
+ }
+
+ print(f"→ Sending: {method}")
+ print(f" Request: {json.dumps(request_data, indent=2)}")
+
+ async with httpx.AsyncClient() as client:
+ try:
+ response = await client.post(
+ RPC_ENDPOINT,
+ json=request_data,
+ headers={"Content-Type": "application/json"},
+ timeout=30.0
+ )
+
+ print(f" Status: {response.status_code}")
+
+ if response.status_code == 200:
+ response_data = response.json()
+ print(f" Response: {json.dumps(response_data, indent=2)}")
+ return response_data
+ else:
+ print(f" Error: {response.text}")
+ return None
+
+ except Exception as e:
+ print(f" Failed: {e}")
+ return None
+
+async def main():
+ """Main function to test the agent RPC endpoints."""
+ print(f"🚀 Testing Agent RPC: {AGENT_ID}")
+ print(f"🔗 Endpoint: {RPC_ENDPOINT}")
+ print("=" * 50)
+
+ # Step 1: Create a task
+ print("\n📝 Step 1: Creating a task...")
+ task_response = await send_rpc_request("task/create", {
+ "params": {
+ "description": "Test task from simple script"
+ }
+ })
+
+ if not task_response or task_response.get("error"):
+ print("❌ Task creation failed, continuing anyway...")
+ task_id = str(uuid.uuid4()) # Generate a task ID to continue
+ else:
+ # Extract task_id from response (adjust based on actual response structure)
+ task_id = task_response.get("result", {}).get("id", str(uuid.uuid4()))
+
+ print(f"📋 Using task_id: {task_id}")
+
+ # Step 2: Send messages
+ print("\n📤 Step 2: Sending messages...")
+
+ messages = [f"This is message {i}" for i in range(20)]
+
+ for i, message in enumerate(messages, 1):
+ print(f"\n📨 Sending message {i}/{len(messages)}")
+
+ # Create message content using TextContent structure
+ message_content = {
+ "type": "text",
+ "author": "user",
+ "style": "static",
+ "format": "plain",
+ "content": message
+ }
+
+        # Send message using the event/send method
+ response = await send_rpc_request("event/send", {
+ "task_id": task_id,
+ "event": message_content,
+ })
+
+ if response and not response.get("error"):
+ print(f"✅ Message {i} sent successfully")
+ else:
+ print(f"❌ Message {i} failed")
+
+ # Small delay between messages
+ await asyncio.sleep(0.1)
+
+ print("\n" + "=" * 50)
+ print("✨ Script completed!")
+ print(f"📋 Task ID: {task_id}")
+
+if __name__ == "__main__":
+ asyncio.run(main())
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/10_temporal/000_hello_acp/.dockerignore b/examples/tutorials/10_agentic/10_temporal/000_hello_acp/.dockerignore
new file mode 100644
index 000000000..4521d8465
--- /dev/null
+++ b/examples/tutorials/10_agentic/10_temporal/000_hello_acp/.dockerignore
@@ -0,0 +1,46 @@
+# Python
+__pycache__/
+*.py[cod]
+*$py.class
+*.so
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# Environments
+.env**
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# IDE
+.idea/
+.vscode/
+*.swp
+*.swo
+
+# Git
+.git
+.gitignore
+
+# Misc
+.DS_Store
+
+# CodeArtifact configuration
+.codeartifact-pip-conf
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/10_temporal/000_hello_acp/Dockerfile b/examples/tutorials/10_agentic/10_temporal/000_hello_acp/Dockerfile
new file mode 100644
index 000000000..2ea8841a2
--- /dev/null
+++ b/examples/tutorials/10_agentic/10_temporal/000_hello_acp/Dockerfile
@@ -0,0 +1,51 @@
+# syntax=docker/dockerfile:1.3
+FROM python:3.12-slim
+COPY --from=ghcr.io/astral-sh/uv:0.6.4 /uv /uvx /bin/
+
+# Install system dependencies
+RUN apt-get update && apt-get install -y \
+ htop \
+ vim \
+ curl \
+ tar \
+ python3-dev \
+ postgresql-client \
+ build-essential \
+ libpq-dev \
+ gcc \
+ cmake \
+ netcat-openbsd \
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
+
+# Install tctl (Temporal CLI)
+RUN curl -L https://github.com/temporalio/tctl/releases/download/v1.18.1/tctl_1.18.1_linux_arm64.tar.gz -o /tmp/tctl.tar.gz && \
+ tar -xzf /tmp/tctl.tar.gz -C /usr/local/bin && \
+ chmod +x /usr/local/bin/tctl && \
+ rm /tmp/tctl.tar.gz
+
+RUN uv pip install --system --upgrade pip setuptools wheel
+
+ENV UV_HTTP_TIMEOUT=1000
+
+# Copy just the requirements file to optimize caching
+COPY 000_hello_acp/requirements.txt /app/requirements.txt
+
+WORKDIR /app/
+
+# Install the required Python packages
+RUN --mount=type=secret,id=codeartifact-pip-conf,target=/etc/pip.conf \
+ export UV_INDEX_URL=$(grep -E "^index-url" /etc/pip.conf | cut -d'=' -f2- | xargs) && \
+ export UV_EXTRA_INDEX_URL=$(grep -E "^extra-index-url" /etc/pip.conf | cut -d'=' -f2- | xargs || echo "") && \
+ uv pip install --system -r requirements.txt
+
+# Copy the project code
+COPY 000_hello_acp/project /app/project
+
+WORKDIR /app/project
+
+# Run the ACP server using uvicorn
+CMD ["uvicorn", "acp:acp", "--host", "0.0.0.0", "--port", "8000"]
+
+# When we deploy the worker, we will replace the CMD with the following
+# CMD ["python", "-m", "run_worker"]
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/10_temporal/000_hello_acp/README.md b/examples/tutorials/10_agentic/10_temporal/000_hello_acp/README.md
new file mode 100644
index 000000000..4d6fa5f92
--- /dev/null
+++ b/examples/tutorials/10_agentic/10_temporal/000_hello_acp/README.md
@@ -0,0 +1,134 @@
+# at000-hello-acp - AgentEx Temporal Agent Template
+
+This is a starter template for building asynchronous agents with the AgentEx framework and Temporal. It provides a basic implementation of the Agent 2 Client Protocol (ACP) with Temporal workflow support to help you get started quickly.
+
+## What You'll Learn
+
+- **Tasks**: A task is a grouping mechanism for related messages. Think of it as a conversation thread or a session.
+- **Messages**: Messages are communication objects within a task. They can contain text, data, or instructions.
+- **ACP Events**: The agent responds to four main events:
+ - `task_received`: When a new task is created
+ - `task_message_received`: When a message is sent within a task
+ - `task_approved`: When a task is approved
+ - `task_canceled`: When a task is canceled
+- **Temporal Workflows**: Long-running processes that can handle complex state management and async operations
+
+
+## Running the Agent
+
+1. Run the agent locally:
+```bash
+agentex agents run --manifest manifest.yaml
+```
+
+The agent will start on port 8000 and print messages whenever it receives any of the ACP events.
+
+## What's Inside
+
+This template:
+- Sets up a basic ACP server with Temporal integration
+- Handles each of the required ACP events
+- Provides a foundation for building complex async agents
+- Includes Temporal workflow and activity definitions
+
+## Next Steps
+
+For more advanced agent development, check out the AgentEx tutorials:
+
+- **Tutorials 00-08**: Learn about building synchronous agents with ACP
+- **Tutorials 09-10**: Learn how to use Temporal to power asynchronous agents
+ - Tutorial 09: Basic Temporal workflow setup
+ - Tutorial 10: Advanced Temporal patterns and best practices
+
+These tutorials will help you understand:
+- How to handle long-running tasks
+- Implementing state machines
+- Managing complex workflows
+- Best practices for async agent development
+
+## The Manifest File
+
+The `manifest.yaml` file is your agent's configuration file. It defines:
+- How your agent should be built and packaged
+- What files are included in your agent's Docker image
+- Your agent's name and description
+- Local development settings (like the port your agent runs on)
+- Temporal worker configuration
+
+This file is essential for both local development and deployment of your agent.
+
+## Project Structure
+
+```
+000_hello_acp/
+├── project/ # Your agent's code
+│ ├── __init__.py
+│ ├── acp.py # ACP server and event handlers
+│ ├── workflow.py # Temporal workflow definitions
+│ ├── activities.py # Temporal activity definitions
+│ └── run_worker.py # Temporal worker setup
+├── Dockerfile # Container definition
+├── manifest.yaml # Deployment config
+└── requirements.txt # Dependencies
+```
+
+## Development
+
+1. **Customize Event Handlers**
+ - Modify the handlers in `acp.py` to implement your agent's logic
+ - Add your own tools and capabilities
+ - Implement custom state management
+
+2. **Add Dependencies**
+ - Add required packages to `requirements.txt`
+ - Update the manifest with any needed credentials
+
+## Local Development
+
+1. **Install AgentEx**
+```bash
+cd agentex-py
+uv venv
+source .venv/bin/activate
+uv sync
+```
+
+2. **Deploy your agent**
+```bash
+# From this directory
+export ENVIRONMENT=development && agentex agents create --manifest manifest.yaml
+```
+
+3. **Interact with your agent**
+
+Option 0: CLI (deprecated - to be replaced once a new CLI is implemented - please use the web UI for now!)
+```bash
+# Submit a task via CLI
+agentex tasks submit --agent at000-hello-acp --task "Your task here"
+```
+
+Option 1: Web UI
+```bash
+# Start the local web interface
+cd agentex-web
+make dev
+
+# Then open http://localhost:3000 in your browser to chat with your agent
+```
+
+## Development Tips
+
+1. **Local Testing**
+- Set environment variables in project/.env for any required credentials. OR you can set these in the manifest.yaml
+
+### To build the agent Docker image locally (normally not necessary):
+
+1. First, set up CodeArtifact authentication:
+```bash
+../../setup-build-codeartifact.sh
+```
+
+2. Build the agent image:
+```bash
+agentex agents build --manifest manifest.yaml --secret 'id=codeartifact-pip-conf,src=.codeartifact-pip-conf'
+```
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/10_temporal/000_hello_acp/manifest.yaml b/examples/tutorials/10_agentic/10_temporal/000_hello_acp/manifest.yaml
new file mode 100644
index 000000000..b0da05331
--- /dev/null
+++ b/examples/tutorials/10_agentic/10_temporal/000_hello_acp/manifest.yaml
@@ -0,0 +1,139 @@
+# Agent Manifest Configuration
+# ---------------------------
+# This file defines how your agent should be built and deployed.
+
+# Build Configuration
+# ------------------
+# The build config defines what gets packaged into your agent's Docker image.
+# This same configuration is used whether building locally or remotely.
+#
+# When building:
+# 1. All files from include_paths are collected into a build context
+# 2. The context is filtered by dockerignore rules
+# 3. The Dockerfile uses this context to build your agent's image
+# 4. The image is pushed to a registry and used to run your agent
+build:
+ context:
+ # Root directory for the build context
+ root: ../ # Keep this as the default root
+
+ # Paths to include in the Docker build context
+ # Must include:
+ # - agentex-py: The core framework (needed for agent execution)
+ # - Your agent's directory (your custom agent code)
+ # These paths are collected and sent to the Docker daemon for building
+ include_paths:
+ - 000_hello_acp
+
+ # Path to your agent's Dockerfile
+ # This defines how your agent's image is built from the context
+ # Relative to the root directory
+ dockerfile: 000_hello_acp/Dockerfile
+
+ # Path to your agent's .dockerignore
+ # Filters unnecessary files from the build context
+ # Helps keep build context small and builds fast
+ dockerignore: 000_hello_acp/.dockerignore
+
+
+# Local Development Configuration
+# -----------------------------
+# Only used when running the agent locally
+local_development:
+ agent:
+ port: 8000 # Port where your local ACP server is running
+ host_address: host.docker.internal # Host address for Docker networking (host.docker.internal for Docker, localhost for direct)
+
+ # File paths for local development (relative to this manifest.yaml)
+ paths:
+ # Path to ACP server file
+ # Examples:
+ # project/acp.py (standard)
+ # src/server.py (custom structure)
+ # ../shared/acp.py (shared across projects)
+ # /absolute/path/acp.py (absolute path)
+ acp: project/acp.py
+
+ # Path to temporal worker file
+ # Examples:
+ # project/run_worker.py (standard)
+ # workers/temporal.py (custom structure)
+ # ../shared/worker.py (shared across projects)
+ worker: project/run_worker.py
+
+
+# Agent Configuration
+# -----------------
+agent:
+ # Type of agent - either sync or agentic
+ acp_type: agentic
+
+ # Unique name for your agent
+ # Used for task routing and monitoring
+ name: at000-hello-acp
+
+ # Description of what your agent does
+ # Helps with documentation and discovery
+ description: An AgentEx agent that shows how ACP works with Temporal
+
+ # Temporal workflow configuration
+ # This enables your agent to run as a Temporal workflow for long-running tasks
+ temporal:
+ enabled: true
+ workflows:
+ # Name of the workflow class
+ # Must match the @workflow.defn name in your workflow.py
+ - name: at000-hello-acp
+
+ # Queue name for task distribution
+ # Used by Temporal to route tasks to your agent
+      # Convention: <agent_name>_task_queue
+ queue_name: 000_hello_acp_queue
+
+ # Optional: Credentials mapping
+ # Maps Kubernetes secrets to environment variables
+ # Common credentials include:
+ # credentials:
+ # - env_var_name: OPENAI_API_KEY
+ # secret_name: openai-api-key
+ # secret_key: api-key
+
+ # Optional: Set Environment variables for running your agent locally as well
+ # as for deployment later on
+ # env:
+ # - name: OPENAI_BASE_URL
+ # value: "https://api.openai.com/v1"
+ # - name: ACCOUNT_ID
+ # value: "your_account_id_here"
+
+
+# Deployment Configuration
+# -----------------------
+# Configuration for deploying your agent to Kubernetes clusters
+deployment:
+ # Container image configuration
+ image:
+ repository: "" # Update with your container registry
+ tag: "latest" # Default tag, should be versioned in production
+
+ imagePullSecrets:
+ - name: my-registry-secret # Update with your image pull secret name
+
+ # Global deployment settings that apply to all clusters
+ # These can be overridden using --override-file with custom configuration files
+ global:
+ agent:
+ name: "at000-hello-acp"
+ description: "An AgentEx agent that shows how ACP works with Temporal"
+
+ # Default replica count
+ replicaCount: 1
+
+ # Default resource requirements
+ resources:
+ requests:
+ cpu: "500m"
+ memory: "1Gi"
+ limits:
+ cpu: "1000m"
+ memory: "2Gi"
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/10_temporal/000_hello_acp/project/__init__.py b/examples/tutorials/10_agentic/10_temporal/000_hello_acp/project/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/examples/tutorials/10_agentic/10_temporal/000_hello_acp/project/acp.py b/examples/tutorials/10_agentic/10_temporal/000_hello_acp/project/acp.py
new file mode 100644
index 000000000..d3b8e52d6
--- /dev/null
+++ b/examples/tutorials/10_agentic/10_temporal/000_hello_acp/project/acp.py
@@ -0,0 +1,31 @@
+import os
+
+from agentex.lib.sdk.fastacp.fastacp import FastACP
+from agentex.lib.types.fastacp import TemporalACPConfig
+
+
+# Create the ACP server
+acp = FastACP.create(
+ acp_type="agentic",
+ config=TemporalACPConfig(
+ # When deployed to the cluster, the Temporal address will automatically be set to the cluster address
+ # For local development, we set the address manually to talk to the local Temporal service set up via docker compose
+ type="temporal",
+ temporal_address=os.getenv("TEMPORAL_ADDRESS", "localhost:7233")
+ )
+)
+
+
+# Notice that we don't need to register any handlers when we use type="temporal"
+# If you look at the code in agentex.sdk.fastacp.impl.temporal_acp
+# You can see that these handlers are automatically registered when the ACP is created
+
+# @acp.on_task_create
+# This will be handled by the method in your workflow that is decorated with @workflow.run
+
+# @acp.on_task_event_send
+# This will be handled by the method in your workflow that is decorated with @workflow.signal(name=SignalName.RECEIVE_MESSAGE)
+
+# @acp.on_task_cancel
+# This does not need to be handled by your workflow.
+# It is automatically handled by the temporal client which cancels the workflow directly
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/10_temporal/000_hello_acp/project/run_worker.py b/examples/tutorials/10_agentic/10_temporal/000_hello_acp/project/run_worker.py
new file mode 100644
index 000000000..ff50fdfe3
--- /dev/null
+++ b/examples/tutorials/10_agentic/10_temporal/000_hello_acp/project/run_worker.py
@@ -0,0 +1,33 @@
+import asyncio
+import os
+
+from agentex.lib.core.temporal.activities import get_all_activities
+from agentex.lib.core.temporal.workers.worker import AgentexWorker
+from agentex.lib.utils.logging import make_logger
+from agentex.lib.environment_variables import EnvironmentVariables
+
+from workflow import At000HelloAcpWorkflow
+
+
+environment_variables = EnvironmentVariables.refresh()
+
+logger = make_logger(__name__)
+
+
+async def main():
+ task_queue_name = environment_variables.WORKFLOW_TASK_QUEUE
+ if task_queue_name is None:
+ raise ValueError("WORKFLOW_TASK_QUEUE is not set")
+
+ # Create a worker with automatic tracing
+ worker = AgentexWorker(
+ task_queue=task_queue_name,
+ )
+
+ await worker.run(
+ activities=get_all_activities(),
+ workflow=At000HelloAcpWorkflow,
+ )
+
+if __name__ == "__main__":
+ asyncio.run(main())
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/10_temporal/000_hello_acp/project/workflow.py b/examples/tutorials/10_agentic/10_temporal/000_hello_acp/project/workflow.py
new file mode 100644
index 000000000..556a0afb7
--- /dev/null
+++ b/examples/tutorials/10_agentic/10_temporal/000_hello_acp/project/workflow.py
@@ -0,0 +1,71 @@
+import json
+from typing import override
+
+from temporalio import workflow
+
+from agentex.lib import adk
+from agentex.lib.types.acp import CreateTaskParams, SendEventParams
+from agentex.lib.core.temporal.workflows.workflow import BaseWorkflow
+from agentex.lib.core.temporal.types.workflow import SignalName
+from agentex.lib.utils.logging import make_logger
+from agentex.types.text_content import TextContent
+from agentex.lib.environment_variables import EnvironmentVariables
+
+environment_variables = EnvironmentVariables.refresh()
+
+if environment_variables.WORKFLOW_NAME is None:
+ raise ValueError("Environment variable WORKFLOW_NAME is not set")
+
+if environment_variables.AGENT_NAME is None:
+ raise ValueError("Environment variable AGENT_NAME is not set")
+
+logger = make_logger(__name__)
+
+@workflow.defn(name=environment_variables.WORKFLOW_NAME)
+class At000HelloAcpWorkflow(BaseWorkflow):
+ """
+ Minimal async workflow template for AgentEx Temporal agents.
+ """
+ def __init__(self):
+ super().__init__(display_name=environment_variables.AGENT_NAME)
+ self._complete_task = False
+
+ @workflow.signal(name=SignalName.RECEIVE_EVENT)
+ @override
+ async def on_task_event_send(self, params: SendEventParams) -> None:
+ logger.info(f"Received task message instruction: {params}")
+
+ # 2. Echo back the client's message to show it in the UI. This is not done by default so the agent developer has full control over what is shown to the user.
+ await adk.messages.create(task_id=params.task.id, content=params.event.content)
+
+ # 3. Send a simple response message.
+ # In future tutorials, this is where we'll add more sophisticated response logic.
+ await adk.messages.create(
+ task_id=params.task.id,
+ content=TextContent(
+ author="agent",
+ content=f"Hello! I've received your message. I can't respond right now, but in future tutorials we'll see how you can get me to intelligently respond to your message.",
+ ),
+ )
+
+ @workflow.run
+ @override
+ async def on_task_create(self, params: CreateTaskParams) -> None:
+ logger.info(f"Received task create params: {params}")
+
+ # 1. Acknowledge that the task has been created.
+ await adk.messages.create(
+ task_id=params.task.id,
+ content=TextContent(
+ author="agent",
+ content=f"Hello! I've received your task. Normally you can do some state initialization here, or just pass and do nothing until you get your first event. For now I'm just acknowledging that I've received a task with the following params:\n\n{json.dumps(params.params, indent=2)}.\n\nYou should only see this message once, when the task is created. All subsequent events will be handled by the `on_task_event_send` handler.",
+ ),
+ )
+
+ # 2. Wait for the task to be completed indefinitely. If we don't do this the workflow will close as soon as this function returns. Temporal can run hundreds of millions of workflows in parallel, so you don't need to worry about too many workflows running at once.
+
+ # Thus, if you want this agent to field events indefinitely (or for a long time) you need to wait for a condition to be met.
+ await workflow.wait_condition(
+ lambda: self._complete_task,
+ timeout=None, # Set a timeout if you want to prevent the task from running indefinitely. Generally this is not needed. Temporal can run hundreds of millions of workflows in parallel and more. Only do this if you have a specific reason to do so.
+ )
diff --git a/examples/tutorials/10_agentic/10_temporal/000_hello_acp/requirements.txt b/examples/tutorials/10_agentic/10_temporal/000_hello_acp/requirements.txt
new file mode 100644
index 000000000..d12ad3951
--- /dev/null
+++ b/examples/tutorials/10_agentic/10_temporal/000_hello_acp/requirements.txt
@@ -0,0 +1,4 @@
+agentex-py>=0.0.4,<0.1.0
+
+# Scale GenAI Platform Python SDK
+scale-gp
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/10_temporal/010_agent_chat/.codeartifact-pip-conf b/examples/tutorials/10_agentic/10_temporal/010_agent_chat/.codeartifact-pip-conf
new file mode 100644
index 000000000..0f1920f12
--- /dev/null
+++ b/examples/tutorials/10_agentic/10_temporal/010_agent_chat/.codeartifact-pip-conf
@@ -0,0 +1,4 @@
+[global]
+index-url = https://aws:REDACTED-CODEARTIFACT-AUTH-TOKEN@scale-307185671274.d.codeartifact.us-west-2.amazonaws.com/pypi/scale-pypi/simple/
+extra-index-url = https://pypi.python.org/simple
+
diff --git a/examples/tutorials/10_agentic/10_temporal/010_agent_chat/.dockerignore b/examples/tutorials/10_agentic/10_temporal/010_agent_chat/.dockerignore
new file mode 100644
index 000000000..4521d8465
--- /dev/null
+++ b/examples/tutorials/10_agentic/10_temporal/010_agent_chat/.dockerignore
@@ -0,0 +1,46 @@
+# Python
+__pycache__/
+*.py[cod]
+*$py.class
+*.so
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# Environments
+.env**
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# IDE
+.idea/
+.vscode/
+*.swp
+*.swo
+
+# Git
+.git
+.gitignore
+
+# Misc
+.DS_Store
+
+# CodeArtifact configuration
+.codeartifact-pip-conf
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/10_temporal/010_agent_chat/Dockerfile b/examples/tutorials/10_agentic/10_temporal/010_agent_chat/Dockerfile
new file mode 100644
index 000000000..44b253f27
--- /dev/null
+++ b/examples/tutorials/10_agentic/10_temporal/010_agent_chat/Dockerfile
@@ -0,0 +1,51 @@
+# syntax=docker/dockerfile:1.3
+FROM python:3.12-slim
+COPY --from=ghcr.io/astral-sh/uv:0.6.4 /uv /uvx /bin/
+
+# Install system dependencies
+RUN apt-get update && apt-get install -y \
+ htop \
+ vim \
+ curl \
+ tar \
+ python3-dev \
+ postgresql-client \
+ build-essential \
+ libpq-dev \
+ gcc \
+ cmake \
+ netcat-openbsd \
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
+
+# Install tctl (Temporal CLI)
+RUN curl -L https://github.com/temporalio/tctl/releases/download/v1.18.1/tctl_1.18.1_linux_arm64.tar.gz -o /tmp/tctl.tar.gz && \
+ tar -xzf /tmp/tctl.tar.gz -C /usr/local/bin && \
+ chmod +x /usr/local/bin/tctl && \
+ rm /tmp/tctl.tar.gz
+
+RUN uv pip install --system --upgrade pip setuptools wheel
+
+ENV UV_HTTP_TIMEOUT=1000
+
+# Copy just the requirements file to optimize caching
+COPY 010_agent_chat/requirements.txt /app/requirements.txt
+
+WORKDIR /app/
+
+# Install the required Python packages
+RUN --mount=type=secret,id=codeartifact-pip-conf,target=/etc/pip.conf \
+ export UV_INDEX_URL=$(grep -E "^index-url" /etc/pip.conf | cut -d'=' -f2- | xargs) && \
+ export UV_EXTRA_INDEX_URL=$(grep -E "^extra-index-url" /etc/pip.conf | cut -d'=' -f2- | xargs || echo "") && \
+ uv pip install --system -r requirements.txt
+
+# Copy the project code
+COPY 010_agent_chat/project /app/project
+
+WORKDIR /app/project
+
+# Run the ACP server using uvicorn
+CMD ["uvicorn", "acp:acp", "--host", "0.0.0.0", "--port", "8000"]
+
+# When we deploy the worker, we will replace the CMD with the following
+# CMD ["python", "-m", "run_worker"]
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/10_temporal/010_agent_chat/README.md b/examples/tutorials/10_agentic/10_temporal/010_agent_chat/README.md
new file mode 100644
index 000000000..163da7bce
--- /dev/null
+++ b/examples/tutorials/10_agentic/10_temporal/010_agent_chat/README.md
@@ -0,0 +1,132 @@
+# at010-agent-chat - AgentEx Temporal Agent Template
+
+This is a starter template for building asynchronous agents with the AgentEx framework and Temporal. It provides a basic implementation of the Agent 2 Client Protocol (ACP) with Temporal workflow support to help you get started quickly.
+
+## What You'll Learn
+
+- **Tasks**: A task is a grouping mechanism for related messages. Think of it as a conversation thread or a session.
+- **Messages**: Messages are communication objects within a task. They can contain text, data, or instructions.
+- **ACP Events**: The agent responds to three main events:
+ - `task_received`: When a new task is created
+ - `task_message_received`: When a message is sent within a task
+ - `task_canceled`: When a task is canceled
+- **Temporal Workflows**: Long-running processes that can handle complex state management and async operations
+
+## Running the Agent
+
+1. Run the agent locally:
+```bash
+agentex agents run --manifest manifest.yaml
+```
+
+The agent will start on port 8000 and print messages whenever it receives any of the ACP events.
+
+## What's Inside
+
+This template:
+- Sets up a basic ACP server with Temporal integration
+- Handles each of the required ACP events
+- Provides a foundation for building complex async agents
+- Includes Temporal workflow and activity definitions
+
+## Next Steps
+
+For more advanced agent development, check out the AgentEx tutorials:
+
+- **Tutorials 00-08**: Learn about building synchronous agents with ACP
+- **Tutorials 09-10**: Learn how to use Temporal to power asynchronous agents
+ - Tutorial 09: Basic Temporal workflow setup
+ - Tutorial 10: Advanced Temporal patterns and best practices
+
+These tutorials will help you understand:
+- How to handle long-running tasks
+- Implementing state machines
+- Managing complex workflows
+- Best practices for async agent development
+
+## The Manifest File
+
+The `manifest.yaml` file is your agent's configuration file. It defines:
+- How your agent should be built and packaged
+- What files are included in your agent's Docker image
+- Your agent's name and description
+- Local development settings (like the port your agent runs on)
+- Temporal worker configuration
+
+This file is essential for both local development and deployment of your agent.
+
+## Project Structure
+
+```
+010_agent_chat/
+├── project/ # Your agent's code
+│ ├── __init__.py
+│ ├── acp.py # ACP server and event handlers
+│ ├── workflow.py # Temporal workflow definitions
+│ ├── activities.py # Temporal activity definitions
+│ └── run_worker.py # Temporal worker setup
+├── Dockerfile # Container definition
+├── manifest.yaml # Deployment config
+└── requirements.txt # Dependencies
+```
+
+## Development
+
+1. **Customize Event Handlers**
+ - Modify the handlers in `acp.py` to implement your agent's logic
+ - Add your own tools and capabilities
+ - Implement custom state management
+
+2. **Add Dependencies**
+ - Add required packages to `requirements.txt`
+ - Update the manifest with any needed credentials
+
+## Local Development
+
+1. **Install AgentEx**
+```bash
+cd agentex-py
+uv venv
+source .venv/bin/activate
+uv sync
+```
+
+2. **Deploy your agent**
+```bash
+# From this directory
+export ENVIRONMENT=development && agentex agents create --manifest manifest.yaml
+```
+
+3. **Interact with your agent**
+
+Option 0: CLI (deprecated - to be replaced once a new CLI is implemented - please use the web UI for now!)
+```bash
+# Submit a task via CLI
+agentex tasks submit --agent at010-agent-chat --task "Your task here"
+```
+
+Option 1: Web UI
+```bash
+# Start the local web interface
+cd agentex-web
+make dev
+
+# Then open http://localhost:3000 in your browser to chat with your agent
+```
+
+## Development Tips
+
+1. **Local Testing**
+- Set environment variables in project/.env for any required credentials. OR you can set these in the manifest.yaml
+
+### To build the agent Docker image locally (normally not necessary):
+
+1. First, set up CodeArtifact authentication:
+```bash
+../../setup-build-codeartifact.sh
+```
+
+2. Build the agent image:
+```bash
+agentex agents build --manifest manifest.yaml --secret 'id=codeartifact-pip-conf,src=.codeartifact-pip-conf'
+```
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/10_temporal/010_agent_chat/manifest.yaml b/examples/tutorials/10_agentic/10_temporal/010_agent_chat/manifest.yaml
new file mode 100644
index 000000000..2e43c9828
--- /dev/null
+++ b/examples/tutorials/10_agentic/10_temporal/010_agent_chat/manifest.yaml
@@ -0,0 +1,139 @@
+# Agent Manifest Configuration
+# ---------------------------
+# This file defines how your agent should be built and deployed.
+
+# Build Configuration
+# ------------------
+# The build config defines what gets packaged into your agent's Docker image.
+# This same configuration is used whether building locally or remotely.
+#
+# When building:
+# 1. All files from include_paths are collected into a build context
+# 2. The context is filtered by dockerignore rules
+# 3. The Dockerfile uses this context to build your agent's image
+# 4. The image is pushed to a registry and used to run your agent
+build:
+ context:
+ # Root directory for the build context
+ root: ../ # Keep this as the default root
+
+ # Paths to include in the Docker build context
+ # Must include:
+ # - agentex-py: The core framework (needed for agent execution)
+ # - Your agent's directory (your custom agent code)
+ # These paths are collected and sent to the Docker daemon for building
+ include_paths:
+ - 010_agent_chat
+
+ # Path to your agent's Dockerfile
+ # This defines how your agent's image is built from the context
+ # Relative to the root directory
+ dockerfile: 010_agent_chat/Dockerfile
+
+ # Path to your agent's .dockerignore
+ # Filters unnecessary files from the build context
+ # Helps keep build context small and builds fast
+ dockerignore: 010_agent_chat/.dockerignore
+
+
+# Local Development Configuration
+# -----------------------------
+# Only used when running the agent locally
+local_development:
+ agent:
+ port: 8000 # Port where your local ACP server is running
+ host_address: host.docker.internal # Host address for Docker networking (host.docker.internal for Docker, localhost for direct)
+
+ # File paths for local development (relative to this manifest.yaml)
+ paths:
+ # Path to ACP server file
+ # Examples:
+ # project/acp.py (standard)
+ # src/server.py (custom structure)
+ # ../shared/acp.py (shared across projects)
+ # /absolute/path/acp.py (absolute path)
+ acp: project/acp.py
+
+ # Path to temporal worker file
+ # Examples:
+ # project/run_worker.py (standard)
+ # workers/temporal.py (custom structure)
+ # ../shared/worker.py (shared across projects)
+ worker: project/run_worker.py
+
+
+# Agent Configuration
+# -----------------
+agent:
+ # Type of agent - either sync or agentic
+ acp_type: agentic
+
+ # Unique name for your agent
+ # Used for task routing and monitoring
+ name: at010-agent-chat
+
+ # Description of what your agent does
+ # Helps with documentation and discovery
+  description: An AgentEx agent that streams multiturn tool-enabled chat with tracing
+
+ # Temporal workflow configuration
+ # This enables your agent to run as a Temporal workflow for long-running tasks
+ temporal:
+ enabled: true
+ workflows:
+ # Name of the workflow class
+ # Must match the @workflow.defn name in your workflow.py
+ - name: at010-agent-chat
+
+ # Queue name for task distribution
+ # Used by Temporal to route tasks to your agent
+    # Convention: <agent_name>_task_queue
+ queue_name: 010_agent_chat_queue
+
+ # Optional: Credentials mapping
+ # Maps Kubernetes secrets to environment variables
+ # Common credentials include:
+ # credentials:
+ # - env_var_name: OPENAI_API_KEY
+ # secret_name: openai-api-key
+ # secret_key: api-key
+
+ # Optional: Set Environment variables for running your agent locally as well
+ # as for deployment later on
+ # env:
+ # - name: OPENAI_BASE_URL
+ # value: "https://api.openai.com/v1"
+ # - name: ACCOUNT_ID
+ # value: "your_account_id_here"
+
+
+# Deployment Configuration
+# -----------------------
+# Configuration for deploying your agent to Kubernetes clusters
+deployment:
+ # Container image configuration
+ image:
+ repository: "" # Update with your container registry
+ tag: "latest" # Default tag, should be versioned in production
+
+ imagePullSecrets:
+ - name: my-registry-secret # Update with your image pull secret name
+
+ # Global deployment settings that apply to all clusters
+ # These can be overridden using --override-file with custom configuration files
+ global:
+ agent:
+ name: "at010-agent-chat"
+ description: "An AgentEx agentthat streams multiturn tool-enabled chat with tracing"
+
+ # Default replica count
+ replicaCount: 1
+
+ # Default resource requirements
+ resources:
+ requests:
+ cpu: "500m"
+ memory: "1Gi"
+ limits:
+ cpu: "1000m"
+ memory: "2Gi"
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/10_temporal/010_agent_chat/project/__init__.py b/examples/tutorials/10_agentic/10_temporal/010_agent_chat/project/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/examples/tutorials/10_agentic/10_temporal/010_agent_chat/project/acp.py b/examples/tutorials/10_agentic/10_temporal/010_agent_chat/project/acp.py
new file mode 100644
index 000000000..d3b8e52d6
--- /dev/null
+++ b/examples/tutorials/10_agentic/10_temporal/010_agent_chat/project/acp.py
@@ -0,0 +1,31 @@
+import os
+
+from agentex.lib.sdk.fastacp.fastacp import FastACP
+from agentex.lib.types.fastacp import TemporalACPConfig
+
+
+# Create the ACP server
+acp = FastACP.create(
+ acp_type="agentic",
+ config=TemporalACPConfig(
+ # When deployed to the cluster, the Temporal address will automatically be set to the cluster address
+ # For local development, we set the address manually to talk to the local Temporal service set up via docker compose
+ type="temporal",
+ temporal_address=os.getenv("TEMPORAL_ADDRESS", "localhost:7233")
+ )
+)
+
+
+# Notice that we don't need to register any handlers when we use type="temporal"
+# If you look at the code in agentex.sdk.fastacp.impl.temporal_acp
+# You can see that these handlers are automatically registered when the ACP is created
+
+# @acp.on_task_create
+# This will be handled by the method in your workflow that is decorated with @workflow.run
+
+# @acp.on_task_event_send
+# This will be handled by the method in your workflow that is decorated with @workflow.signal(name=SignalName.RECEIVE_EVENT)
+
+# @acp.on_task_cancel
+# This does not need to be handled by your workflow.
+# It is automatically handled by the temporal client which cancels the workflow directly
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/10_temporal/010_agent_chat/project/run_worker.py b/examples/tutorials/10_agentic/10_temporal/010_agent_chat/project/run_worker.py
new file mode 100644
index 000000000..b8916bc40
--- /dev/null
+++ b/examples/tutorials/10_agentic/10_temporal/010_agent_chat/project/run_worker.py
@@ -0,0 +1,32 @@
+import asyncio
+
+from agentex.lib.core.temporal.activities import get_all_activities
+from agentex.lib.core.temporal.workers.worker import AgentexWorker
+from agentex.lib.utils.logging import make_logger
+from agentex.lib.environment_variables import EnvironmentVariables
+
+from workflow import At010AgentChatWorkflow
+
+
+environment_variables = EnvironmentVariables.refresh()
+
+logger = make_logger(__name__)
+
+
+async def main():
+ task_queue_name = environment_variables.WORKFLOW_TASK_QUEUE
+ if task_queue_name is None:
+ raise ValueError("WORKFLOW_TASK_QUEUE is not set")
+
+ # Create a worker with automatic tracing
+ worker = AgentexWorker(
+ task_queue=task_queue_name,
+ )
+
+ await worker.run(
+ activities=get_all_activities(),
+ workflow=At010AgentChatWorkflow,
+ )
+
+if __name__ == "__main__":
+ asyncio.run(main())
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/10_temporal/010_agent_chat/project/workflow.py b/examples/tutorials/10_agentic/10_temporal/010_agent_chat/project/workflow.py
new file mode 100644
index 000000000..fc172438a
--- /dev/null
+++ b/examples/tutorials/10_agentic/10_temporal/010_agent_chat/project/workflow.py
@@ -0,0 +1,147 @@
+import os
+from typing import Dict, List, override
+from dotenv import load_dotenv
+
+from dotenv import load_dotenv
+from agentex.lib.utils.model_utils import BaseModel
+from mcp import StdioServerParameters
+from temporalio import workflow
+
+from agentex.lib import adk
+from agentex.lib.types.acp import CreateTaskParams, SendEventParams
+from agentex.lib.core.temporal.workflows.workflow import BaseWorkflow
+from agentex.lib.core.temporal.types.workflow import SignalName
+from agentex.lib.utils.logging import make_logger
+from agentex.lib.core.tracing.tracing_processor_manager import add_tracing_processor_config
+from agentex.lib.types.tracing import SGPTracingProcessorConfig
+from agentex.lib.environment_variables import EnvironmentVariables
+from agentex.types.text_content import TextContent
+
+environment_variables = EnvironmentVariables.refresh()
+load_dotenv(dotenv_path=".env")
+
+add_tracing_processor_config(SGPTracingProcessorConfig(
+ sgp_api_key=os.environ.get("SCALE_GP_API_KEY", ""),
+ sgp_account_id=os.environ.get("SCALE_GP_ACCOUNT_ID", ""),
+))
+
+if environment_variables.WORKFLOW_NAME is None:
+ raise ValueError("Environment variable WORKFLOW_NAME is not set")
+
+if environment_variables.AGENT_NAME is None:
+ raise ValueError("Environment variable AGENT_NAME is not set")
+
+logger = make_logger(__name__)
+
+
+class StateModel(BaseModel):
+ input_list: List[Dict]
+ turn_number: int
+
+
+MCP_SERVERS = [
+ StdioServerParameters(
+ command="npx",
+ args=["-y", "@modelcontextprotocol/server-sequential-thinking"],
+ ),
+ StdioServerParameters(
+ command="uvx",
+ args=["openai-websearch-mcp"],
+ env={
+ "OPENAI_API_KEY": os.environ.get("OPENAI_API_KEY", "")
+ }
+ ),
+]
+
+@workflow.defn(name=environment_variables.WORKFLOW_NAME)
+class At010AgentChatWorkflow(BaseWorkflow):
+ """
+ Minimal async workflow template for AgentEx Temporal agents.
+ """
+ def __init__(self):
+ super().__init__(display_name=environment_variables.AGENT_NAME)
+ self._complete_task = False
+ self._state = None
+
+ @workflow.signal(name=SignalName.RECEIVE_EVENT)
+ @override
+ async def on_task_event_send(self, params: SendEventParams) -> None:
+ logger.info(f"Received task message instruction: {params}")
+
+ if not params.event.content:
+ return
+
+ if params.event.content.type != "text":
+ raise ValueError(f"Expected text message, got {params.event.content.type}")
+
+ if params.event.content.author != "user":
+ raise ValueError(f"Expected user message, got {params.event.content.author}")
+
+ # Increment the turn number
+ self._state.turn_number += 1
+ # Add the new user message to the message history
+ self._state.input_list.append({"role": "user", "content": params.event.content.content})
+
+ async with adk.tracing.span(
+ trace_id=params.task.id,
+ name=f"Turn {self._state.turn_number}",
+ input=self._state
+ ) as span:
+ # Echo back the user's message so it shows up in the UI. This is not done by default so the agent developer has full control over what is shown to the user.
+ await adk.messages.create(
+ task_id=params.task.id,
+ trace_id=params.task.id,
+ content=params.event.content,
+ parent_span_id=span.id if span else None,
+ )
+
+ if not os.environ.get("OPENAI_API_KEY"):
+ await adk.messages.create(
+ task_id=params.task.id,
+ trace_id=params.task.id,
+ content=TextContent(
+ author="agent",
+ content="Hey, sorry I'm unable to respond to your message because you're running this example without an OpenAI API key. Please set the OPENAI_API_KEY environment variable to run this example. Do this by either by adding a .env file to the project/ directory or by setting the environment variable in your terminal.",
+ ),
+ parent_span_id=span.id if span else None,
+ )
+
+ # Call an LLM to respond to the user's message
+ # When send_as_agent_task_message=True, returns a TaskMessage
+ run_result = await adk.providers.openai.run_agent_streamed_auto_send(
+ task_id=params.task.id,
+ trace_id=params.task.id,
+ input_list=self._state.input_list,
+ mcp_server_params=MCP_SERVERS,
+ agent_name="Tool-Enabled Assistant",
+ agent_instructions="""You are a helpful assistant that can answer questions using various tools.
+ You have access to sequential thinking and web search capabilities through MCP servers.
+ Use these tools when appropriate to provide accurate and well-reasoned responses.""",
+ parent_span_id=span.id if span else None,
+ )
+ self._state.input_list = run_result.final_input_list
+
+ # Set the span output to the state for the next turn
+ span.output = self._state
+
+ @workflow.run
+ @override
+ async def on_task_create(self, params: CreateTaskParams) -> None:
+ logger.info(f"Received task create params: {params}")
+
+ # 1. Initialize the state. You can either do this here or in the __init__ method.
+ # This function is triggered whenever a client creates a task for this agent.
+ # It is not re-triggered when a new event is sent to the task.
+ self._state = StateModel(
+ input_list=[],
+ turn_number=0,
+ )
+
+ # 2. Wait for the task to be completed indefinitely. If we don't do this the workflow will close as soon as this function returns. Temporal can run hundreds of millions of workflows in parallel, so you don't need to worry about too many workflows running at once.
+
+ # Thus, if you want this agent to field events indefinitely (or for a long time) you need to wait for a condition to be met.
+
+ await workflow.wait_condition(
+ lambda: self._complete_task,
+ timeout=None, # Set a timeout if you want to prevent the task from running indefinitely. Generally this is not needed. Temporal can run hundreds of millions of workflows in parallel and more. Only do this if you have a specific reason to do so.
+ )
diff --git a/examples/tutorials/10_agentic/10_temporal/010_agent_chat/requirements.txt b/examples/tutorials/10_agentic/10_temporal/010_agent_chat/requirements.txt
new file mode 100644
index 000000000..d12ad3951
--- /dev/null
+++ b/examples/tutorials/10_agentic/10_temporal/010_agent_chat/requirements.txt
@@ -0,0 +1,4 @@
+agentex-py>=0.0.4,<0.1.0
+
+# Scale GenAI Platform Python SDK
+scale-gp
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/10_temporal/020_state_machine/.dockerignore b/examples/tutorials/10_agentic/10_temporal/020_state_machine/.dockerignore
new file mode 100644
index 000000000..4521d8465
--- /dev/null
+++ b/examples/tutorials/10_agentic/10_temporal/020_state_machine/.dockerignore
@@ -0,0 +1,46 @@
+# Python
+__pycache__/
+*.py[cod]
+*$py.class
+*.so
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# Environments
+.env**
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# IDE
+.idea/
+.vscode/
+*.swp
+*.swo
+
+# Git
+.git
+.gitignore
+
+# Misc
+.DS_Store
+
+# CodeArtifact configuration
+.codeartifact-pip-conf
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/10_temporal/020_state_machine/Dockerfile b/examples/tutorials/10_agentic/10_temporal/020_state_machine/Dockerfile
new file mode 100644
index 000000000..923964f3d
--- /dev/null
+++ b/examples/tutorials/10_agentic/10_temporal/020_state_machine/Dockerfile
@@ -0,0 +1,51 @@
+# syntax=docker/dockerfile:1.3
+FROM python:3.12-slim
+COPY --from=ghcr.io/astral-sh/uv:0.6.4 /uv /uvx /bin/
+
+# Install system dependencies
+RUN apt-get update && apt-get install -y \
+ htop \
+ vim \
+ curl \
+ tar \
+ python3-dev \
+ postgresql-client \
+ build-essential \
+ libpq-dev \
+ gcc \
+ cmake \
+ netcat-openbsd \
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
+
+# Install tctl (Temporal CLI)
+RUN curl -L https://github.com/temporalio/tctl/releases/download/v1.18.1/tctl_1.18.1_linux_arm64.tar.gz -o /tmp/tctl.tar.gz && \
+ tar -xzf /tmp/tctl.tar.gz -C /usr/local/bin && \
+ chmod +x /usr/local/bin/tctl && \
+ rm /tmp/tctl.tar.gz
+
+RUN uv pip install --system --upgrade pip setuptools wheel
+
+ENV UV_HTTP_TIMEOUT=1000
+
+# Copy just the requirements file to optimize caching
+COPY 10_agentic/10_temporal/020_state_machine/requirements.txt /app/10_agentic/10_temporal/020_state_machine/requirements.txt
+
+WORKDIR /app/10_agentic/10_temporal/020_state_machine
+
+# Install the required Python packages
+RUN --mount=type=secret,id=codeartifact-pip-conf,target=/etc/pip.conf \
+ export UV_INDEX_URL=$(grep -E "^index-url" /etc/pip.conf | cut -d'=' -f2- | xargs) && \
+ export UV_EXTRA_INDEX_URL=$(grep -E "^extra-index-url" /etc/pip.conf | cut -d'=' -f2- | xargs || echo "") && \
+ uv pip install --system -r requirements.txt
+
+# Copy the project code
+COPY 10_agentic/10_temporal/020_state_machine/project /app/10_agentic/10_temporal/020_state_machine/project
+
+WORKDIR /app/10_agentic/10_temporal/020_state_machine/project
+
+# Run the ACP server using uvicorn
+CMD ["uvicorn", "acp:acp", "--host", "0.0.0.0", "--port", "8000"]
+
+# When we deploy the worker, we will replace the CMD with the following
+# CMD ["python", "-m", "run_worker"]
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/10_temporal/020_state_machine/README.md b/examples/tutorials/10_agentic/10_temporal/020_state_machine/README.md
new file mode 100644
index 000000000..18c51d188
--- /dev/null
+++ b/examples/tutorials/10_agentic/10_temporal/020_state_machine/README.md
@@ -0,0 +1,145 @@
+# at020-state-machine - AgentEx Temporal Agent Template
+
+This is a starter template for building asynchronous agents with the AgentEx framework and Temporal. It provides a basic implementation of the Agent 2 Client Protocol (ACP) with Temporal workflow support to help you get started quickly.
+
+## What You'll Learn
+
+- **Tasks**: A task is a grouping mechanism for related messages. Think of it as a conversation thread or a session.
+- **Messages**: Messages are communication objects within a task. They can contain text, data, or instructions.
+- **ACP Events**: The agent responds to four main events:
+ - `task_received`: When a new task is created
+ - `task_message_received`: When a message is sent within a task
+ - `task_approved`: When a task is approved
+ - `task_canceled`: When a task is canceled
+- **Temporal Workflows**: Long-running processes that can handle complex state management and async operations
+
+## Running the Agent
+
+1. Run the agent locally:
+```bash
+agentex agents run --manifest manifest.yaml
+```
+
+The agent will start on port 8000 and print messages whenever it receives any of the ACP events.
+
+## What's Inside
+
+This template:
+- Sets up a basic ACP server with Temporal integration
+- Handles each of the required ACP events
+- Provides a foundation for building complex async agents
+- Includes Temporal workflow and activity definitions
+
+## Next Steps
+
+For more advanced agent development, check out the AgentEx tutorials:
+
+- **Tutorials 00-08**: Learn about building synchronous agents with ACP
+- **Tutorials 09-10**: Learn how to use Temporal to power asynchronous agents
+ - Tutorial 09: Basic Temporal workflow setup
+ - Tutorial 10: Advanced Temporal patterns and best practices
+
+These tutorials will help you understand:
+- How to handle long-running tasks
+- Implementing state machines
+- Managing complex workflows
+- Best practices for async agent development
+
+## The Manifest File
+
+The `manifest.yaml` file is your agent's configuration file. It defines:
+- How your agent should be built and packaged
+- What files are included in your agent's Docker image
+- Your agent's name and description
+- Local development settings (like the port your agent runs on)
+- Temporal worker configuration
+
+This file is essential for both local development and deployment of your agent.
+
+## Project Structure
+
+```
+020_state_machine/
+├── project/ # Your agent's code
+│ ├── __init__.py
+│ ├── acp.py # ACP server and event handlers
+│ ├── workflow.py # Temporal workflow definitions
+│ ├── activities.py # Temporal activity definitions
+│ └── run_worker.py # Temporal worker setup
+├── Dockerfile # Container definition
+├── manifest.yaml # Deployment config
+└── requirements.txt # Dependencies
+```
+
+## Development
+
+1. **Customize Event Handlers**
+ - Modify the handlers in `acp.py` to implement your agent's logic
+ - Add your own tools and capabilities
+ - Implement custom state management
+
+2. **Add Dependencies**
+ - Add required packages to `requirements.txt`
+ - Update the manifest with any needed credentials
+
+## Local Development
+
+1. **Install AgentEx**
+```bash
+cd agentex-py
+uv venv
+source .venv/bin/activate
+uv sync
+```
+
+2. **Start the Agentex Server**
+```bash
+# Navigate to the backend directory
+cd agentex
+
+# Start all services using Docker Compose
+make dev
+
+# Optional: In a separate terminal, use lazydocker for a better UI (everything should say "healthy")
+lzd
+```
+
+3. **Run your agent**
+```bash
+# From this directory
+export ENVIRONMENT=development && agentex agents run --manifest manifest.yaml
+```
+
+4. **Interact with your agent**
+
+Option 0: CLI (deprecated - to be replaced once a new CLI is implemented - please use the web UI for now!)
+```bash
+# Submit a task via CLI
+agentex tasks submit --agent at020-state-machine --task "Your task here"
+```
+
+Option 1: Web UI
+```bash
+# Start the local web interface
+cd agentex-web
+make dev
+
+# Then open http://localhost:3000 in your browser to chat with your agent
+```
+
+## Development Tips
+
+1. **Local Testing**
+- Set environment variables in project/.env for any required credentials. OR you can set these in the manifest.yaml
+
+### To build the agent Docker image locally (normally not necessary):
+
+1. First, set up CodeArtifact authentication:
+```bash
+../../setup-build-codeartifact.sh
+```
+
+2. Build the agent image:
+```bash
+agentex agents build --manifest manifest.yaml --secret 'id=codeartifact-pip-conf,src=.codeartifact-pip-conf'
+```
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/10_temporal/020_state_machine/manifest.yaml b/examples/tutorials/10_agentic/10_temporal/020_state_machine/manifest.yaml
new file mode 100644
index 000000000..2835df837
--- /dev/null
+++ b/examples/tutorials/10_agentic/10_temporal/020_state_machine/manifest.yaml
@@ -0,0 +1,138 @@
+# Agent Manifest Configuration
+# ---------------------------
+# This file defines how your agent should be built and deployed.
+
+# Build Configuration
+# ------------------
+# The build config defines what gets packaged into your agent's Docker image.
+# This same configuration is used whether building locally or remotely.
+#
+# When building:
+# 1. All files from include_paths are collected into a build context
+# 2. The context is filtered by dockerignore rules
+# 3. The Dockerfile uses this context to build your agent's image
+# 4. The image is pushed to a registry and used to run your agent
+build:
+ context:
+ # Root directory for the build context
+ root: ../ # Keep this as the default root
+
+ # Paths to include in the Docker build context
+ # Must include:
+ # - agentex-py: The core framework (needed for agent execution)
+ # - Your agent's directory (your custom agent code)
+ # These paths are collected and sent to the Docker daemon for building
+ include_paths:
+ - 10_agentic/10_temporal/020_state_machine
+
+ # Path to your agent's Dockerfile
+ # This defines how your agent's image is built from the context
+ # Relative to the root directory
+ dockerfile: 10_agentic/10_temporal/020_state_machine/Dockerfile
+
+ # Path to your agent's .dockerignore
+ # Filters unnecessary files from the build context
+ # Helps keep build context small and builds fast
+ dockerignore: 10_agentic/10_temporal/020_state_machine/.dockerignore
+
+
+# Local Development Configuration
+# -----------------------------
+# Only used when running the agent locally
+local_development:
+ agent:
+ port: 8000 # Port where your local ACP server is running
+ host_address: host.docker.internal # Host address for Docker networking (host.docker.internal for Docker, localhost for direct)
+
+ # File paths for local development (relative to this manifest.yaml)
+ paths:
+ # Path to ACP server file
+ # Examples:
+ # project/acp.py (standard)
+ # src/server.py (custom structure)
+ # ../shared/acp.py (shared across projects)
+ # /absolute/path/acp.py (absolute path)
+ acp: project/acp.py
+
+ # Path to temporal worker file
+ # Examples:
+ # project/run_worker.py (standard)
+ # workers/temporal.py (custom structure)
+ # ../shared/worker.py (shared across projects)
+ worker: project/run_worker.py
+
+
+# Agent Configuration
+# -----------------
+agent:
+ # Type of agent - either sync or agentic
+ acp_type: agentic
+
+ # Unique name for your agent
+ # Used for task routing and monitoring
+ name: at020-state-machine
+
+ # Description of what your agent does
+ # Helps with documentation and discovery
+ description: An AgentEx agent that demonstrates how to use state machines to manage complex agentic workflows
+
+ # Temporal workflow configuration
+ # This enables your agent to run as a Temporal workflow for long-running tasks
+ temporal:
+ enabled: true
+ workflows:
+ # Name of the workflow class
+ # Must match the @workflow.defn name in your workflow.py
+ - name: at020-state-machine
+
+ # Queue name for task distribution
+ # Used by Temporal to route tasks to your agent
+ # Convention: <agent_name>_task_queue
+ queue_name: 020_state_machine_queue
+
+ # Optional: Credentials mapping
+ # Maps Kubernetes secrets to environment variables
+ # Common credentials include:
+ # credentials:
+ # - env_var_name: OPENAI_API_KEY
+ # secret_name: openai-api-key
+ # secret_key: api-key
+
+ # Optional: Set Environment variables for running your agent locally as well
+ # as for deployment later on
+ # env:
+ # OPENAI_API_KEY: ""
+ # OPENAI_BASE_URL: ""
+ # OPENAI_ORG_ID: ""
+
+
+# Deployment Configuration
+# -----------------------
+# Configuration for deploying your agent to Kubernetes clusters
+deployment:
+ # Container image configuration
+ image:
+ repository: "" # Update with your container registry
+ tag: "latest" # Default tag, should be versioned in production
+
+ imagePullSecrets:
+ - name: my-registry-secret # Update with your image pull secret name
+
+ # Global deployment settings that apply to all clusters
+ # These can be overridden using --override-file with custom configuration files
+ global:
+ agent:
+ name: "at020-state-machine"
+ description: "An AgentEx agent that demonstrates how to use state machines to manage complex agentic workflows"
+
+ # Default replica count
+ replicaCount: 1
+
+ # Default resource requirements
+ resources:
+ requests:
+ cpu: "500m"
+ memory: "1Gi"
+ limits:
+ cpu: "1000m"
+ memory: "2Gi"
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/10_temporal/020_state_machine/project/__init__.py b/examples/tutorials/10_agentic/10_temporal/020_state_machine/project/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/examples/tutorials/10_agentic/10_temporal/020_state_machine/project/acp.py b/examples/tutorials/10_agentic/10_temporal/020_state_machine/project/acp.py
new file mode 100644
index 000000000..d3b8e52d6
--- /dev/null
+++ b/examples/tutorials/10_agentic/10_temporal/020_state_machine/project/acp.py
@@ -0,0 +1,31 @@
+import os
+
+from agentex.lib.sdk.fastacp.fastacp import FastACP
+from agentex.lib.types.fastacp import TemporalACPConfig
+
+
+# Create the ACP server
+acp = FastACP.create(
+ acp_type="agentic",
+ config=TemporalACPConfig(
+ # When deployed to the cluster, the Temporal address will automatically be set to the cluster address
+ # For local development, we set the address manually to talk to the local Temporal service set up via docker compose
+ type="temporal",
+ temporal_address=os.getenv("TEMPORAL_ADDRESS", "localhost:7233")
+ )
+)
+
+
+# Notice that we don't need to register any handlers when we use type="temporal"
+# If you look at the code in agentex.sdk.fastacp.impl.temporal_acp
+# You can see that these handlers are automatically registered when the ACP is created
+
+# @acp.on_task_create
+# This will be handled by the method in your workflow that is decorated with @workflow.run
+
+# @acp.on_task_event_send
+# This will be handled by the method in your workflow that is decorated with @workflow.signal(name=SignalName.RECEIVE_MESSAGE)
+
+# @acp.on_task_cancel
+# This does not need to be handled by your workflow.
+# It is automatically handled by the temporal client which cancels the workflow directly
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/10_temporal/020_state_machine/project/run_worker.py b/examples/tutorials/10_agentic/10_temporal/020_state_machine/project/run_worker.py
new file mode 100644
index 000000000..a5e6049a0
--- /dev/null
+++ b/examples/tutorials/10_agentic/10_temporal/020_state_machine/project/run_worker.py
@@ -0,0 +1,33 @@
+import asyncio
+import os
+
+from agentex.lib.core.temporal.activities import get_all_activities
+from agentex.lib.core.temporal.workers.worker import AgentexWorker
+from agentex.lib.utils.logging import make_logger
+from agentex.lib.environment_variables import EnvironmentVariables
+
+from workflow import At020StateMachineWorkflow
+
+
+environment_variables = EnvironmentVariables.refresh()
+
+logger = make_logger(__name__)
+
+
+async def main():
+ task_queue_name = environment_variables.WORKFLOW_TASK_QUEUE
+ if task_queue_name is None:
+ raise ValueError("WORKFLOW_TASK_QUEUE is not set")
+
+ # Create a worker with automatic tracing
+ worker = AgentexWorker(
+ task_queue=task_queue_name,
+ )
+
+ await worker.run(
+ activities=get_all_activities(),
+ workflow=At020StateMachineWorkflow,
+ )
+
+if __name__ == "__main__":
+ asyncio.run(main())
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/10_temporal/020_state_machine/project/state_machines/deep_research.py b/examples/tutorials/10_agentic/10_temporal/020_state_machine/project/state_machines/deep_research.py
new file mode 100644
index 000000000..add9c185e
--- /dev/null
+++ b/examples/tutorials/10_agentic/10_temporal/020_state_machine/project/state_machines/deep_research.py
@@ -0,0 +1,40 @@
+from enum import Enum
+from typing import Dict, List, Optional, override
+from agentex.types.span import Span
+from pydantic import BaseModel
+
+from agentex.lib.sdk.state_machine import StateMachine
+
+
+class DeepResearchState(str, Enum):
+ """States for the deep research workflow."""
+ CLARIFYING_USER_QUERY = "clarifying_user_query"
+ PERFORMING_DEEP_RESEARCH = "performing_deep_research"
+ WAITING_FOR_USER_INPUT = "waiting_for_user_input"
+ COMPLETED = "completed"
+ FAILED = "failed"
+
+
+class DeepResearchData(BaseModel):
+ """Data model for the deep research state machine - everything is one continuous research report."""
+ task_id: Optional[str] = None
+ current_span: Optional[Span] = None
+ current_turn: int = 1
+
+ # Research report data
+ user_query: str = ""
+ follow_up_questions: List[str] = []
+ follow_up_responses: List[str] = []
+ n_follow_up_questions_to_ask: int = 1
+ agent_input_list: List[Dict[str, str]] = []
+ research_report: str = ""
+ research_iteration: int = 0
+
+
+class DeepResearchStateMachine(StateMachine[DeepResearchData]):
+ """State machine for the deep research workflow."""
+
+ @override
+ async def terminal_condition(self) -> bool:
+ """Check if the state machine has reached a terminal state."""
+ return self.get_current_state() == DeepResearchState.COMPLETED
diff --git a/examples/tutorials/10_agentic/10_temporal/020_state_machine/project/workflow.py b/examples/tutorials/10_agentic/10_temporal/020_state_machine/project/workflow.py
new file mode 100644
index 000000000..6e374bde8
--- /dev/null
+++ b/examples/tutorials/10_agentic/10_temporal/020_state_machine/project/workflow.py
@@ -0,0 +1,142 @@
+import asyncio
+import json
+from typing import override
+
+from temporalio import workflow
+
+from agentex.lib import adk
+from agentex.lib.types.acp import CreateTaskParams, SendEventParams
+from agentex.lib.core.temporal.workflows.workflow import BaseWorkflow
+from agentex.lib.core.temporal.types.workflow import SignalName
+from agentex.lib.utils.logging import make_logger
+from agentex.types.text_content import TextContent
+from agentex.lib.environment_variables import EnvironmentVariables
+from agentex.lib.sdk.state_machine.state import State
+
+from state_machines.deep_research import DeepResearchStateMachine, DeepResearchState, DeepResearchData
+from workflows.deep_research.clarify_user_query import ClarifyUserQueryWorkflow
+from workflows.deep_research.waiting_for_user_input import WaitingForUserInputWorkflow
+from workflows.deep_research.performing_deep_research import PerformingDeepResearchWorkflow
+
+environment_variables = EnvironmentVariables.refresh()
+
+if environment_variables.WORKFLOW_NAME is None:
+ raise ValueError("Environment variable WORKFLOW_NAME is not set")
+
+if environment_variables.AGENT_NAME is None:
+ raise ValueError("Environment variable AGENT_NAME is not set")
+
+logger = make_logger(__name__)
+
+@workflow.defn(name=environment_variables.WORKFLOW_NAME)
+class At020StateMachineWorkflow(BaseWorkflow):
+ """
+ Minimal async workflow template for AgentEx Temporal agents.
+ """
+ def __init__(self):
+ super().__init__(display_name=environment_variables.AGENT_NAME)
+ self.state_machine = DeepResearchStateMachine(
+ initial_state=DeepResearchState.WAITING_FOR_USER_INPUT,
+ states=[
+ State(name=DeepResearchState.CLARIFYING_USER_QUERY, workflow=ClarifyUserQueryWorkflow()),
+ State(name=DeepResearchState.WAITING_FOR_USER_INPUT, workflow=WaitingForUserInputWorkflow()),
+ State(name=DeepResearchState.PERFORMING_DEEP_RESEARCH, workflow=PerformingDeepResearchWorkflow()),
+ ],
+ state_machine_data=DeepResearchData(),
+ trace_transitions=True
+ )
+
+ @override
+ @workflow.signal(name=SignalName.RECEIVE_EVENT)
+ async def on_task_event_send(self, params: SendEventParams) -> None:
+ deep_research_data = self.state_machine.get_state_machine_data()
+ task = params.task
+ message = params.event.content
+
+ # If waiting for user input, handle the message
+ if self.state_machine.get_current_state() == DeepResearchState.WAITING_FOR_USER_INPUT:
+ if not deep_research_data.user_query:
+ # First time - initialize research data
+ deep_research_data.user_query = message.content
+ deep_research_data.current_turn += 1
+
+ if not deep_research_data.current_span:
+ deep_research_data.current_span = await adk.tracing.start_span(
+ trace_id=task.id,
+ name=f"Turn {deep_research_data.current_turn}",
+ input={
+ "task_id": task.id,
+ "message": message.content,
+ }
+ )
+ else:
+ # Check if we're in the middle of follow-up questions
+ if deep_research_data.n_follow_up_questions_to_ask > 0:
+ # User is responding to a follow-up question
+ deep_research_data.follow_up_responses.append(message.content)
+
+ # Add the Q&A to the agent input list as context
+ if deep_research_data.follow_up_questions:
+ last_question = deep_research_data.follow_up_questions[-1]
+ qa_context = f"Q: {last_question}\nA: {message.content}"
+ deep_research_data.agent_input_list.append({
+ "role": "user",
+ "content": qa_context
+ })
+ else:
+ # User is asking a new follow-up question about the same research topic
+ # Add the user's follow-up question to the agent input list as context
+ if deep_research_data.agent_input_list:
+ # Add user's follow-up question to the conversation
+ deep_research_data.agent_input_list.append({
+ "role": "user",
+ "content": f"Additional question: {message.content}"
+ })
+ else:
+ # Initialize agent input list with the follow-up question
+ deep_research_data.agent_input_list = [{
+ "role": "user",
+ "content": f"Original query: {deep_research_data.user_query}\nAdditional question: {message.content}"
+ }]
+
+ deep_research_data.current_turn += 1
+
+ if not deep_research_data.current_span:
+ deep_research_data.current_span = await adk.tracing.start_span(
+ trace_id=task.id,
+ name=f"Turn {deep_research_data.current_turn}",
+ input={
+ "task_id": task.id,
+ "message": message.content,
+ }
+ )
+
+ # Always go to clarifying user query to ask follow-up questions
+ # This ensures we gather more context before doing deep research
+ await self.state_machine.transition(DeepResearchState.CLARIFYING_USER_QUERY)
+
+ # Echo back the user's message
+ await adk.messages.create(
+ task_id=task.id,
+ content=TextContent(
+ author="user",
+ content=message.content,
+ ),
+ trace_id=task.id,
+ parent_span_id=deep_research_data.current_span.id if deep_research_data.current_span else None,
+ )
+
+ @override
+ @workflow.run
+ async def on_task_create(self, params: CreateTaskParams) -> None:
+ task = params.task
+
+ self.state_machine.set_task_id(task.id)
+ deep_research_data = self.state_machine.get_state_machine_data()
+ deep_research_data.task_id = task.id
+
+ try:
+ await self.state_machine.run()
+ except asyncio.CancelledError as error:
+ logger.warning(f"Task canceled by user: {task.id}")
+ raise error
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/10_temporal/020_state_machine/project/workflows/deep_research/clarify_user_query.py b/examples/tutorials/10_agentic/10_temporal/020_state_machine/project/workflows/deep_research/clarify_user_query.py
new file mode 100644
index 000000000..4ef6834a4
--- /dev/null
+++ b/examples/tutorials/10_agentic/10_temporal/020_state_machine/project/workflows/deep_research/clarify_user_query.py
@@ -0,0 +1,83 @@
+from typing import Optional, override
+from agentex.lib.sdk.state_machine.state_machine import StateMachine
+
+from agentex.lib import adk
+from agentex.lib.sdk.state_machine.state_workflow import StateWorkflow
+from agentex.lib.types.llm_messages import LLMConfig, SystemMessage, UserMessage
+from agentex.lib.utils.logging import make_logger
+
+from state_machines.deep_research import DeepResearchData, DeepResearchState
+
+logger = make_logger(__name__)
+
+FOLLOW_UP_QUESTION_TEMPLATE = """
+Given the following research query from the user, ask a follow up question to clarify the research direction.
+
+{{ user_query }}
+
+
+{% if follow_up_questions|length > 0 %}
+The following are follow up questions and answers that have been asked/given so far:
+{% for q in follow_up_questions %}
+Q: {{ follow_up_questions[loop.index0] }}
+A: {{ follow_up_responses[loop.index0] }}
+{% endfor %}
+{% endif %}
+
+Return the follow up question and nothing else.
+Follow up question:
+"""
+
+class ClarifyUserQueryWorkflow(StateWorkflow):
+ """Workflow for engaging in follow-up questions."""
+
+ @override
+ async def execute(self, state_machine: StateMachine, state_machine_data: Optional[DeepResearchData] = None) -> str:
+ """Execute the workflow."""
+ if state_machine_data is None:
+ return DeepResearchState.PERFORMING_DEEP_RESEARCH
+
+ if state_machine_data.n_follow_up_questions_to_ask == 0:
+ # No more follow-up questions to ask, proceed to deep research
+ return DeepResearchState.PERFORMING_DEEP_RESEARCH
+
+ # Generate follow-up question prompt
+ if state_machine_data.task_id and state_machine_data.current_span:
+ follow_up_question_generation_prompt = await adk.utils.templating.render_jinja(
+ trace_id=state_machine_data.task_id,
+ template=FOLLOW_UP_QUESTION_TEMPLATE,
+ variables={
+ "user_query": state_machine_data.user_query,
+ "follow_up_questions": state_machine_data.follow_up_questions,
+ "follow_up_responses": state_machine_data.follow_up_responses
+ },
+ parent_span_id=state_machine_data.current_span.id,
+ )
+
+ task_message = await adk.providers.litellm.chat_completion_stream_auto_send(
+ task_id=state_machine_data.task_id,
+ llm_config=LLMConfig(
+ model="gpt-4o-mini",
+ messages=[
+ SystemMessage(content="You are assistant that follows exact instructions without outputting any other text except your response to the user's exact request."),
+ UserMessage(content=follow_up_question_generation_prompt),
+ ],
+ stream=True,
+ ),
+ trace_id=state_machine_data.task_id,
+ parent_span_id=state_machine_data.current_span.id,
+ )
+ follow_up_question = task_message.content.content
+
+ # Update with follow-up question
+ state_machine_data.follow_up_questions.append(follow_up_question)
+
+ # Decrement the number of follow-up questions to ask
+ state_machine_data.n_follow_up_questions_to_ask -= 1
+
+ logger.info(f"Current research data: {state_machine_data}")
+
+ # Always go back to waiting for user input to get their response
+ return DeepResearchState.WAITING_FOR_USER_INPUT
+ else:
+ return DeepResearchState.PERFORMING_DEEP_RESEARCH
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/10_temporal/020_state_machine/project/workflows/deep_research/performing_deep_research.py b/examples/tutorials/10_agentic/10_temporal/020_state_machine/project/workflows/deep_research/performing_deep_research.py
new file mode 100644
index 000000000..d8afbc82b
--- /dev/null
+++ b/examples/tutorials/10_agentic/10_temporal/020_state_machine/project/workflows/deep_research/performing_deep_research.py
@@ -0,0 +1,156 @@
+from datetime import datetime
+import os
+from typing import Optional, override
+
+from agentex.lib import adk
+from agentex.lib.sdk.state_machine.state_machine import StateMachine
+from mcp import StdioServerParameters
+
+from agentex.lib.sdk.state_machine.state_workflow import StateWorkflow
+from agentex.types.text_content import TextContent
+from agentex.lib.utils.logging import make_logger
+
+from state_machines.deep_research import DeepResearchData, DeepResearchState
+
+logger = make_logger(__name__)
+
+MCP_SERVERS = [
+ StdioServerParameters(
+ command="uvx",
+ args=["mcp-server-time", "--local-timezone", "America/Los_Angeles"],
+ ),
+ StdioServerParameters(
+ command="uvx",
+ args=["openai-websearch-mcp"],
+ env={
+ "OPENAI_API_KEY": os.environ.get("OPENAI_API_KEY", "")
+ }
+ ),
+ StdioServerParameters(
+ command="uvx",
+ args=["mcp-server-fetch"],
+ ),
+]
+
+class PerformingDeepResearchWorkflow(StateWorkflow):
+ """Workflow for performing deep research."""
+
+ @override
+ async def execute(self, state_machine: StateMachine, state_machine_data: Optional[DeepResearchData] = None) -> str:
+ """Execute the workflow."""
+ if state_machine_data is None:
+ return DeepResearchState.CLARIFYING_USER_QUERY
+
+ if not state_machine_data.user_query:
+ return DeepResearchState.CLARIFYING_USER_QUERY
+
+ # Construct initial research instruction
+ follow_up_qa_str = ""
+ for q, r in zip(state_machine_data.follow_up_questions, state_machine_data.follow_up_responses):
+ follow_up_qa_str += f"Q: {q}\nA: {r}\n"
+
+ # Increment research iteration
+ state_machine_data.research_iteration += 1
+
+ # Create research instruction based on whether this is the first iteration or a continuation
+ if state_machine_data.research_iteration == 1:
+ initial_instruction = (
+ f"Initial Query: {state_machine_data.user_query}\n"
+ f"Follow-up Q&A:\n{follow_up_qa_str}"
+ )
+
+ # Notify user that deep research is starting
+ if state_machine_data.task_id and state_machine_data.current_span:
+ await adk.messages.create(
+ task_id=state_machine_data.task_id,
+ content=TextContent(
+ author="agent",
+ content="Starting deep research process based on your query and follow-up responses...",
+ ),
+ trace_id=state_machine_data.task_id,
+ parent_span_id=state_machine_data.current_span.id,
+ )
+ else:
+ initial_instruction = (
+ f"Initial Query: {state_machine_data.user_query}\n"
+ f"Follow-up Q&A:\n{follow_up_qa_str}\n"
+ f"Current Research Report (Iteration {state_machine_data.research_iteration - 1}):\n{state_machine_data.research_report}"
+ )
+
+ # Notify user that research is continuing
+ if state_machine_data.task_id and state_machine_data.current_span:
+ await adk.messages.create(
+ task_id=state_machine_data.task_id,
+ content=TextContent(
+ author="agent",
+ content=f"Continuing deep research (iteration {state_machine_data.research_iteration}) to expand and refine the research report...",
+ ),
+ trace_id=state_machine_data.task_id,
+ parent_span_id=state_machine_data.current_span.id,
+ )
+
+ # Fetch the current time in human readable format
+ current_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S %Z")
+
+ # Deep Research Loop
+ if not state_machine_data.agent_input_list:
+ state_machine_data.agent_input_list = [
+ {"role": "user", "content": f"""
+Here is my initial query, clarified with the following follow-up questions and answers:
+{initial_instruction}
+
+You should now perform a depth search to get a more detailed understanding of the most promising areas.
+
+The current time is {current_time}.
+"""}
+ ]
+
+ if state_machine_data.task_id and state_machine_data.current_span:
+ result = await adk.providers.openai.run_agent_streamed_auto_send(
+ task_id=state_machine_data.task_id,
+ trace_id=state_machine_data.task_id,
+ input_list=state_machine_data.agent_input_list,
+ mcp_server_params=MCP_SERVERS,
+ agent_name="Deep Research Agent",
+ agent_instructions=f"""You are a deep research expert that can search the web for information.
+You should use the tools you have access to to write an extensive report on the users query.
+
+You must use the web search tool at least 10 times before writing your report.
+Use the fetch tool to open links you want to read.
+Then use web search again repeatedly to dig deeper into the most promising areas of search results.
+
+Be very targeted with your searches, make sure all search queries are relevant to either the initial user query or dig deeper into the most promising areas of search results. All searches should tie back to the original query though. Remember your searches are stateless, so there is no context shared between search queries.
+
+Always cite your sources in the format [source](link). Do not hallucinate. Your latent information is not likely to be up to date.
+
+If this is a continuation of previous research (iteration {state_machine_data.research_iteration}), focus on:
+1. Expanding areas that need more detail
+2. Adding new relevant information discovered
+3. Removing outdated or incorrect information
+4. Improving the overall structure and clarity of the report
+""",
+ parent_span_id=state_machine_data.current_span.id,
+ mcp_timeout_seconds=180,
+ )
+
+ # Update state with conversation history
+ state_machine_data.agent_input_list = result.final_input_list
+
+ # Extract the research report from the last assistant message
+ if result.final_input_list:
+ for message in reversed(result.final_input_list):
+ if message.get("role") == "assistant":
+ state_machine_data.research_report = message.get("content", "")
+ break
+
+ # Keep the research data active for future iterations
+
+ if state_machine_data.task_id and state_machine_data.current_span:
+ await adk.tracing.end_span(
+ trace_id=state_machine_data.task_id,
+ span=state_machine_data.current_span,
+ )
+ state_machine_data.current_span = None
+
+ # Transition to waiting for user input state
+ return DeepResearchState.WAITING_FOR_USER_INPUT
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/10_temporal/020_state_machine/project/workflows/deep_research/waiting_for_user_input.py b/examples/tutorials/10_agentic/10_temporal/020_state_machine/project/workflows/deep_research/waiting_for_user_input.py
new file mode 100644
index 000000000..94aa89243
--- /dev/null
+++ b/examples/tutorials/10_agentic/10_temporal/020_state_machine/project/workflows/deep_research/waiting_for_user_input.py
@@ -0,0 +1,17 @@
+from typing import override
+from agentex.lib.sdk.state_machine import StateWorkflow, StateMachine
+from agentex.lib.utils.logging import make_logger
+from temporalio import workflow
+from state_machines.deep_research import DeepResearchData, DeepResearchState
+
+logger = make_logger(__name__)
+
+class WaitingForUserInputWorkflow(StateWorkflow):
+ @override
+ async def execute(self, state_machine: StateMachine, state_machine_data: DeepResearchData = None) -> str:
+ logger.info("ActorWaitingForUserInputWorkflow: waiting for user input...")
+ def condition():
+ current_state = state_machine.get_current_state()
+ return current_state != DeepResearchState.WAITING_FOR_USER_INPUT
+ await workflow.wait_condition(condition)
+ return state_machine.get_current_state()
\ No newline at end of file
diff --git a/examples/tutorials/10_agentic/10_temporal/020_state_machine/requirements.txt b/examples/tutorials/10_agentic/10_temporal/020_state_machine/requirements.txt
new file mode 100644
index 000000000..2c2ebf15b
--- /dev/null
+++ b/examples/tutorials/10_agentic/10_temporal/020_state_machine/requirements.txt
@@ -0,0 +1,5 @@
+# Install agentex-py from local path
+agentex-py>=0.0.4,<0.1.0
+
+# Scale GenAI Platform Python SDK
+scale-gp
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
index 2769d9ca5..3784b5ce5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "agentex"
-version = "0.0.1-alpha.1"
+version = "0.1.0-alpha.1"
description = "The official Python library for the agentex API"
dynamic = ["readme"]
license = "Apache-2.0"
@@ -8,23 +8,44 @@ authors = [
{ name = "Agentex", email = "" },
]
dependencies = [
- "httpx>=0.23.0, <1",
- "pydantic>=1.9.0, <3",
+ "httpx>=0.27.2,<0.28",
+ "pydantic>=2.0.0, <3",
"typing-extensions>=4.10, <5",
"anyio>=3.5.0, <5",
"distro>=1.7.0, <2",
"sniffio",
+ "typer>=0.16,<0.17",
+ "questionary>=2.0.1,<3",
+ "rich>=13.9.2,<14",
+ "fastapi>=0.115.0,<0.116",
+ "uvicorn>=0.31.1",
+ "python-on-whales>=0.73.0,<0.74",
+ "pyyaml>=6.0.2,<7",
+ "jsonschema>=4.23.0,<5",
+ "jsonref>=1.1.0,<2",
+ "temporalio>=1.10.0,<2",
+ "aiohttp>=3.10.10,<4",
+ "redis>=5.2.0,<6",
+ "litellm>=1.66.0,<2",
+ "kubernetes>=25.0.0,<29.0.0",
+ "jinja2>=3.1.3,<4",
+ "mcp[cli]>=1.4.1",
+ "scale-gp>=0.1.0a59",
+ "openai-agents>=0.0.7",
+ "tzlocal>=5.3.1",
+ "tzdata>=2025.2",
+ "pytest>=8.4.0",
+ "pytest-asyncio>=1.0.0",
+ "scale-gp-beta==0.1.0a20",
+ "ipykernel>=6.29.5",
]
-requires-python = ">= 3.8"
+requires-python = ">= 3.12,<4"
classifiers = [
"Typing :: Typed",
"Intended Audience :: Developers",
- "Programming Language :: Python :: 3.8",
- "Programming Language :: Python :: 3.9",
- "Programming Language :: Python :: 3.10",
- "Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13",
+ "Programming Language :: Python :: 3.14",
"Operating System :: OS Independent",
"Operating System :: POSIX",
"Operating System :: MacOS",
@@ -40,6 +61,12 @@ Repository = "https://github.com/scaleapi/agentex-python"
[project.optional-dependencies]
aiohttp = ["aiohttp", "httpx_aiohttp>=0.1.8"]
+dev = [
+ "ruff>=0.3.4",
+]
+
+[project.scripts]
+agentex = "agentex.lib.cli.commands.main:app"
[tool.rye]
managed = true
@@ -142,7 +169,7 @@ filterwarnings = [
# there are a couple of flags that are still disabled by
# default in strict mode as they are experimental and niche.
typeCheckingMode = "strict"
-pythonVersion = "3.8"
+pythonVersion = "3.12"
exclude = [
"_dev",
diff --git a/src/agentex/__init__.py b/src/agentex/__init__.py
index 891c638e5..f2e665bce 100644
--- a/src/agentex/__init__.py
+++ b/src/agentex/__init__.py
@@ -5,7 +5,18 @@
from . import types
from ._types import NOT_GIVEN, Omit, NoneType, NotGiven, Transport, ProxiesTypes
from ._utils import file_from_path
-from ._client import Client, Stream, Agentex, Timeout, Transport, AsyncClient, AsyncStream, AsyncAgentex, RequestOptions
+from ._client import (
+ ENVIRONMENTS,
+ Client,
+ Stream,
+ Agentex,
+ Timeout,
+ Transport,
+ AsyncClient,
+ AsyncStream,
+ AsyncAgentex,
+ RequestOptions,
+)
from ._models import BaseModel
from ._version import __title__, __version__
from ._response import APIResponse as APIResponse, AsyncAPIResponse as AsyncAPIResponse
@@ -29,6 +40,7 @@
from ._base_client import DefaultHttpxClient, DefaultAioHttpClient, DefaultAsyncHttpxClient
from ._utils._logs import setup_logging as _setup_logging
+
__all__ = [
"types",
"__version__",
@@ -61,6 +73,7 @@
"AsyncStream",
"Agentex",
"AsyncAgentex",
+ "ENVIRONMENTS",
"file_from_path",
"BaseModel",
"DEFAULT_TIMEOUT",
diff --git a/src/agentex/_client.py b/src/agentex/_client.py
index ee34e3579..51ebb5e80 100644
--- a/src/agentex/_client.py
+++ b/src/agentex/_client.py
@@ -3,8 +3,8 @@
from __future__ import annotations
import os
-from typing import Any, Union, Mapping
-from typing_extensions import Self, override
+from typing import Any, Dict, Union, Mapping, cast
+from typing_extensions import Self, Literal, override
import httpx
@@ -12,10 +12,7 @@
from ._qs import Querystring
from ._types import (
NOT_GIVEN,
- Body,
Omit,
- Query,
- Headers,
Timeout,
NotGiven,
Transport,
@@ -24,30 +21,35 @@
)
from ._utils import is_given, get_async_library
from ._version import __version__
-from ._response import (
- to_raw_response_wrapper,
- to_streamed_response_wrapper,
- async_to_raw_response_wrapper,
- async_to_streamed_response_wrapper,
-)
-from .resources import echo, spans, events, states, tracker
+from .resources import spans, tasks, agents, events, states, tracker
from ._streaming import Stream as Stream, AsyncStream as AsyncStream
from ._exceptions import APIStatusError
from ._base_client import (
DEFAULT_MAX_RETRIES,
SyncAPIClient,
AsyncAPIClient,
- make_request_options,
)
-from .resources.tasks import tasks
-from .resources.agents import agents
from .resources.messages import messages
-__all__ = ["Timeout", "Transport", "ProxiesTypes", "RequestOptions", "Agentex", "AsyncAgentex", "Client", "AsyncClient"]
+__all__ = [
+ "ENVIRONMENTS",
+ "Timeout",
+ "Transport",
+ "ProxiesTypes",
+ "RequestOptions",
+ "Agentex",
+ "AsyncAgentex",
+ "Client",
+ "AsyncClient",
+]
+
+ENVIRONMENTS: Dict[str, str] = {
+ "production": "http://localhost:5003",
+ "development": "http://localhost:5003",
+}
class Agentex(SyncAPIClient):
- echo: echo.EchoResource
agents: agents.AgentsResource
tasks: tasks.TasksResource
messages: messages.MessagesResource
@@ -61,11 +63,14 @@ class Agentex(SyncAPIClient):
# client options
api_key: str | None
+ _environment: Literal["production", "development"] | NotGiven
+
def __init__(
self,
*,
api_key: str | None = None,
- base_url: str | httpx.URL | None = None,
+ environment: Literal["production", "development"] | NotGiven = NOT_GIVEN,
+ base_url: str | httpx.URL | None | NotGiven = NOT_GIVEN,
timeout: Union[float, Timeout, None, NotGiven] = NOT_GIVEN,
max_retries: int = DEFAULT_MAX_RETRIES,
default_headers: Mapping[str, str] | None = None,
@@ -92,10 +97,31 @@ def __init__(
api_key = os.environ.get("AGENTEX_SDK_API_KEY")
self.api_key = api_key
- if base_url is None:
- base_url = os.environ.get("AGENTEX_BASE_URL")
- if base_url is None:
- base_url = f"https://api.example.com"
+ self._environment = environment
+
+ base_url_env = os.environ.get("AGENTEX_BASE_URL")
+ if is_given(base_url) and base_url is not None:
+ # cast required because mypy doesn't understand the type narrowing
+ base_url = cast("str | httpx.URL", base_url) # pyright: ignore[reportUnnecessaryCast]
+ elif is_given(environment):
+ if base_url_env and base_url is not None:
+ raise ValueError(
+ "Ambiguous URL; The `AGENTEX_BASE_URL` env var and the `environment` argument are given. If you want to use the environment, you must pass base_url=None",
+ )
+
+ try:
+ base_url = ENVIRONMENTS[environment]
+ except KeyError as exc:
+ raise ValueError(f"Unknown environment: {environment}") from exc
+ elif base_url_env is not None:
+ base_url = base_url_env
+ else:
+ self._environment = environment = "production"
+
+ try:
+ base_url = ENVIRONMENTS[environment]
+ except KeyError as exc:
+ raise ValueError(f"Unknown environment: {environment}") from exc
super().__init__(
version=__version__,
@@ -108,7 +134,6 @@ def __init__(
_strict_response_validation=_strict_response_validation,
)
- self.echo = echo.EchoResource(self)
self.agents = agents.AgentsResource(self)
self.tasks = tasks.TasksResource(self)
self.messages = messages.MessagesResource(self)
@@ -141,21 +166,11 @@ def default_headers(self) -> dict[str, str | Omit]:
**self._custom_headers,
}
- @override
- def _validate_headers(self, headers: Headers, custom_headers: Headers) -> None:
- if self.api_key and headers.get("Authorization"):
- return
- if isinstance(custom_headers.get("Authorization"), Omit):
- return
-
- raise TypeError(
- '"Could not resolve authentication method. Expected the api_key to be set. Or for the `Authorization` headers to be explicitly omitted"'
- )
-
def copy(
self,
*,
api_key: str | None = None,
+ environment: Literal["production", "development"] | None = None,
base_url: str | httpx.URL | None = None,
timeout: float | Timeout | None | NotGiven = NOT_GIVEN,
http_client: httpx.Client | None = None,
@@ -191,6 +206,7 @@ def copy(
return self.__class__(
api_key=api_key or self.api_key,
base_url=base_url or self.base_url,
+ environment=environment or self._environment,
timeout=self.timeout if isinstance(timeout, NotGiven) else timeout,
http_client=http_client,
max_retries=max_retries if is_given(max_retries) else self.max_retries,
@@ -203,25 +219,6 @@ def copy(
# client.with_options(timeout=10).foo.create(...)
with_options = copy
- def get_root(
- self,
- *,
- # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
- # The extra values given here take precedence over values defined on the client or passed to this method.
- extra_headers: Headers | None = None,
- extra_query: Query | None = None,
- extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> object:
- """Root"""
- return self.get(
- "/",
- options=make_request_options(
- extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
- ),
- cast_to=object,
- )
-
@override
def _make_status_error(
self,
@@ -257,7 +254,6 @@ def _make_status_error(
class AsyncAgentex(AsyncAPIClient):
- echo: echo.AsyncEchoResource
agents: agents.AsyncAgentsResource
tasks: tasks.AsyncTasksResource
messages: messages.AsyncMessagesResource
@@ -271,11 +267,14 @@ class AsyncAgentex(AsyncAPIClient):
# client options
api_key: str | None
+ _environment: Literal["production", "development"] | NotGiven
+
def __init__(
self,
*,
api_key: str | None = None,
- base_url: str | httpx.URL | None = None,
+ environment: Literal["production", "development"] | NotGiven = NOT_GIVEN,
+ base_url: str | httpx.URL | None | NotGiven = NOT_GIVEN,
timeout: Union[float, Timeout, None, NotGiven] = NOT_GIVEN,
max_retries: int = DEFAULT_MAX_RETRIES,
default_headers: Mapping[str, str] | None = None,
@@ -302,10 +301,31 @@ def __init__(
api_key = os.environ.get("AGENTEX_SDK_API_KEY")
self.api_key = api_key
- if base_url is None:
- base_url = os.environ.get("AGENTEX_BASE_URL")
- if base_url is None:
- base_url = f"https://api.example.com"
+ self._environment = environment
+
+ base_url_env = os.environ.get("AGENTEX_BASE_URL")
+ if is_given(base_url) and base_url is not None:
+ # cast required because mypy doesn't understand the type narrowing
+ base_url = cast("str | httpx.URL", base_url) # pyright: ignore[reportUnnecessaryCast]
+ elif is_given(environment):
+ if base_url_env and base_url is not None:
+ raise ValueError(
+ "Ambiguous URL; The `AGENTEX_BASE_URL` env var and the `environment` argument are given. If you want to use the environment, you must pass base_url=None",
+ )
+
+ try:
+ base_url = ENVIRONMENTS[environment]
+ except KeyError as exc:
+ raise ValueError(f"Unknown environment: {environment}") from exc
+ elif base_url_env is not None:
+ base_url = base_url_env
+ else:
+ self._environment = environment = "production"
+
+ try:
+ base_url = ENVIRONMENTS[environment]
+ except KeyError as exc:
+ raise ValueError(f"Unknown environment: {environment}") from exc
super().__init__(
version=__version__,
@@ -318,7 +338,6 @@ def __init__(
_strict_response_validation=_strict_response_validation,
)
- self.echo = echo.AsyncEchoResource(self)
self.agents = agents.AsyncAgentsResource(self)
self.tasks = tasks.AsyncTasksResource(self)
self.messages = messages.AsyncMessagesResource(self)
@@ -351,21 +370,11 @@ def default_headers(self) -> dict[str, str | Omit]:
**self._custom_headers,
}
- @override
- def _validate_headers(self, headers: Headers, custom_headers: Headers) -> None:
- if self.api_key and headers.get("Authorization"):
- return
- if isinstance(custom_headers.get("Authorization"), Omit):
- return
-
- raise TypeError(
- '"Could not resolve authentication method. Expected the api_key to be set. Or for the `Authorization` headers to be explicitly omitted"'
- )
-
def copy(
self,
*,
api_key: str | None = None,
+ environment: Literal["production", "development"] | None = None,
base_url: str | httpx.URL | None = None,
timeout: float | Timeout | None | NotGiven = NOT_GIVEN,
http_client: httpx.AsyncClient | None = None,
@@ -401,6 +410,7 @@ def copy(
return self.__class__(
api_key=api_key or self.api_key,
base_url=base_url or self.base_url,
+ environment=environment or self._environment,
timeout=self.timeout if isinstance(timeout, NotGiven) else timeout,
http_client=http_client,
max_retries=max_retries if is_given(max_retries) else self.max_retries,
@@ -413,25 +423,6 @@ def copy(
# client.with_options(timeout=10).foo.create(...)
with_options = copy
- async def get_root(
- self,
- *,
- # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
- # The extra values given here take precedence over values defined on the client or passed to this method.
- extra_headers: Headers | None = None,
- extra_query: Query | None = None,
- extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> object:
- """Root"""
- return await self.get(
- "/",
- options=make_request_options(
- extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
- ),
- cast_to=object,
- )
-
@override
def _make_status_error(
self,
@@ -468,7 +459,6 @@ def _make_status_error(
class AgentexWithRawResponse:
def __init__(self, client: Agentex) -> None:
- self.echo = echo.EchoResourceWithRawResponse(client.echo)
self.agents = agents.AgentsResourceWithRawResponse(client.agents)
self.tasks = tasks.TasksResourceWithRawResponse(client.tasks)
self.messages = messages.MessagesResourceWithRawResponse(client.messages)
@@ -477,14 +467,9 @@ def __init__(self, client: Agentex) -> None:
self.events = events.EventsResourceWithRawResponse(client.events)
self.tracker = tracker.TrackerResourceWithRawResponse(client.tracker)
- self.get_root = to_raw_response_wrapper(
- client.get_root,
- )
-
class AsyncAgentexWithRawResponse:
def __init__(self, client: AsyncAgentex) -> None:
- self.echo = echo.AsyncEchoResourceWithRawResponse(client.echo)
self.agents = agents.AsyncAgentsResourceWithRawResponse(client.agents)
self.tasks = tasks.AsyncTasksResourceWithRawResponse(client.tasks)
self.messages = messages.AsyncMessagesResourceWithRawResponse(client.messages)
@@ -493,14 +478,9 @@ def __init__(self, client: AsyncAgentex) -> None:
self.events = events.AsyncEventsResourceWithRawResponse(client.events)
self.tracker = tracker.AsyncTrackerResourceWithRawResponse(client.tracker)
- self.get_root = async_to_raw_response_wrapper(
- client.get_root,
- )
-
class AgentexWithStreamedResponse:
def __init__(self, client: Agentex) -> None:
- self.echo = echo.EchoResourceWithStreamingResponse(client.echo)
self.agents = agents.AgentsResourceWithStreamingResponse(client.agents)
self.tasks = tasks.TasksResourceWithStreamingResponse(client.tasks)
self.messages = messages.MessagesResourceWithStreamingResponse(client.messages)
@@ -509,14 +489,9 @@ def __init__(self, client: Agentex) -> None:
self.events = events.EventsResourceWithStreamingResponse(client.events)
self.tracker = tracker.TrackerResourceWithStreamingResponse(client.tracker)
- self.get_root = to_streamed_response_wrapper(
- client.get_root,
- )
-
class AsyncAgentexWithStreamedResponse:
def __init__(self, client: AsyncAgentex) -> None:
- self.echo = echo.AsyncEchoResourceWithStreamingResponse(client.echo)
self.agents = agents.AsyncAgentsResourceWithStreamingResponse(client.agents)
self.tasks = tasks.AsyncTasksResourceWithStreamingResponse(client.tasks)
self.messages = messages.AsyncMessagesResourceWithStreamingResponse(client.messages)
@@ -525,10 +500,6 @@ def __init__(self, client: AsyncAgentex) -> None:
self.events = events.AsyncEventsResourceWithStreamingResponse(client.events)
self.tracker = tracker.AsyncTrackerResourceWithStreamingResponse(client.tracker)
- self.get_root = async_to_streamed_response_wrapper(
- client.get_root,
- )
-
Client = Agentex
diff --git a/src/agentex/_version.py b/src/agentex/_version.py
index 3b76e3fed..5afa0a716 100644
--- a/src/agentex/_version.py
+++ b/src/agentex/_version.py
@@ -1,4 +1,4 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
__title__ = "agentex"
-__version__ = "0.0.1-alpha.1" # x-release-please-version
+__version__ = "0.1.0-alpha.1" # x-release-please-version
diff --git a/src/agentex/lib/__init__.py b/src/agentex/lib/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/agentex/lib/adk/__init__.py b/src/agentex/lib/adk/__init__.py
new file mode 100644
index 000000000..9989fe5e5
--- /dev/null
+++ b/src/agentex/lib/adk/__init__.py
@@ -0,0 +1,41 @@
"""Agentex ADK: ready-to-use singleton modules for agent development.

Importing this package instantiates one module object per ADK capability
(ACP, agents, tasks, messages, state, streaming, tracing, events, and the
agent task tracker) so callers can use e.g. ``adk.tasks`` directly without
wiring up clients themselves.
"""

from agentex.lib.adk._modules.acp import ACPModule
from agentex.lib.adk._modules.agents import AgentsModule
from agentex.lib.adk._modules.agent_task_tracker import AgentTaskTrackerModule
from agentex.lib.adk._modules.events import EventsModule
from agentex.lib.adk._modules.messages import MessagesModule
from agentex.lib.adk._modules.state import StateModule
from agentex.lib.adk._modules.streaming import StreamingModule
from agentex.lib.adk._modules.tasks import TasksModule
from agentex.lib.adk._modules.tracing import TracingModule

from agentex.lib.adk import providers
from agentex.lib.adk import utils

# Module-level singletons: constructed once at import time and shared by all
# callers. Each module lazily builds its own AsyncAgentex client on first use.
acp = ACPModule()
agents = AgentsModule()
tasks = TasksModule()
messages = MessagesModule()
state = StateModule()
streaming = StreamingModule()
tracing = TracingModule()
events = EventsModule()
agent_task_tracker = AgentTaskTrackerModule()

__all__ = [
    # Core
    "acp",
    "agents",
    "tasks",
    "messages",
    "state",
    "streaming",
    "tracing",
    "events",
    "agent_task_tracker",

    # Providers
    "providers",
    # Utils
    "utils",
]
diff --git a/src/agentex/lib/adk/_modules/__init__.py b/src/agentex/lib/adk/_modules/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/agentex/lib/adk/_modules/acp.py b/src/agentex/lib/adk/_modules/acp.py
new file mode 100644
index 000000000..657f8e144
--- /dev/null
+++ b/src/agentex/lib/adk/_modules/acp.py
@@ -0,0 +1,247 @@
+from datetime import timedelta
+from typing import Any
+
+from temporalio.common import RetryPolicy
+
+from agentex import AsyncAgentex
+from agentex.lib.core.services.adk.acp.acp import ACPService
+from agentex.lib.core.temporal.activities.activity_helpers import ActivityHelpers
+from agentex.lib.core.temporal.activities.adk.acp.acp_activities import (
+ ACPActivityName,
+ EventSendParams,
+ MessageSendParams,
+ TaskCancelParams,
+ TaskCreateParams,
+)
+from agentex.lib.core.tracing.tracer import AsyncTracer
+from agentex.types.task_message import TaskMessage
+from agentex.types.task import Task
+from agentex.lib.utils.logging import make_logger
+from agentex.lib.utils.temporal import in_temporal_workflow
+from agentex.types.task_message_content import TaskMessageContent
+
+logger = make_logger(__name__)
+
+DEFAULT_RETRY_POLICY = RetryPolicy(maximum_attempts=1)
+
+
class ACPModule:
    """
    Module for managing Agent to Client Protocol (ACP) agent operations in Agentex.

    This interface provides high-level methods for interacting with the agent through the ACP.
    Every method transparently dispatches either through a Temporal activity (when called
    from inside a Temporal workflow) or directly through the underlying ACP service.
    """

    def __init__(self, acp_service: ACPService | None = None):
        """
        Initialize the ACP module.

        Args:
            acp_service (Optional[ACPService]): Optional pre-configured ACP service.
                If None, a default AsyncAgentex client and tracer are created and a
                new ACPService is built from them.
        """
        if acp_service is None:
            agentex_client = AsyncAgentex()
            tracer = AsyncTracer(agentex_client)
            self._acp_service = ACPService(agentex_client=agentex_client, tracer=tracer)
        else:
            self._acp_service = acp_service

    async def create_task(
        self,
        name: str | None = None,
        agent_id: str | None = None,
        agent_name: str | None = None,
        params: dict[str, Any] | None = None,
        trace_id: str | None = None,
        parent_span_id: str | None = None,
        start_to_close_timeout: timedelta = timedelta(seconds=5),
        heartbeat_timeout: timedelta = timedelta(seconds=5),
        retry_policy: RetryPolicy = DEFAULT_RETRY_POLICY,
    ):
        """
        Create a new task.

        Args:
            name: The name of the task.
            agent_id: The ID of the agent to create the task for.
            agent_name: The name of the agent to create the task for.
            params: The parameters for the task.
            trace_id: Optional trace ID for tracing.
            parent_span_id: Optional parent span ID for tracing.
            start_to_close_timeout: The start to close timeout for the task.
            heartbeat_timeout: The heartbeat timeout for the task.
            retry_policy: The retry policy for the task.

        Returns:
            The task entry.
        """
        # Inside a Temporal workflow, route through an activity so the call is
        # recorded in workflow history and stays replay-safe; otherwise call
        # the service directly.
        if in_temporal_workflow():
            return await ActivityHelpers.execute_activity(
                activity_name=ACPActivityName.TASK_CREATE,
                request=TaskCreateParams(
                    name=name,
                    agent_id=agent_id,
                    agent_name=agent_name,
                    params=params,
                    trace_id=trace_id,
                    parent_span_id=parent_span_id,
                ),
                response_type=Task,
                start_to_close_timeout=start_to_close_timeout,
                retry_policy=retry_policy,
                heartbeat_timeout=heartbeat_timeout,
            )
        else:
            return await self._acp_service.task_create(
                name=name,
                agent_id=agent_id,
                agent_name=agent_name,
                params=params,
                trace_id=trace_id,
                parent_span_id=parent_span_id,
            )

    async def send_event(
        self,
        task_id: str,
        content: TaskMessageContent,
        agent_id: str | None = None,
        agent_name: str | None = None,
        trace_id: str | None = None,
        parent_span_id: str | None = None,
        start_to_close_timeout: timedelta = timedelta(seconds=5),
        heartbeat_timeout: timedelta = timedelta(seconds=5),
        retry_policy: RetryPolicy = DEFAULT_RETRY_POLICY,
    ):
        """
        Send an event to a task.

        Args:
            task_id: The ID of the task to send the event to.
            content: The task message content to deliver as the event payload.
            agent_id: The ID of the agent to send the event to.
            agent_name: The name of the agent to send the event to.
            trace_id: Optional trace ID for tracing.
            parent_span_id: Optional parent span ID for tracing.

        Returns:
            The event entry.
        """
        # Same temporal-vs-direct dispatch as create_task.
        if in_temporal_workflow():
            return await ActivityHelpers.execute_activity(
                activity_name=ACPActivityName.EVENT_SEND,
                request=EventSendParams(
                    agent_id=agent_id,
                    agent_name=agent_name,
                    task_id=task_id,
                    content=content,
                    trace_id=trace_id,
                    parent_span_id=parent_span_id,
                ),
                # No typed response is deserialized for event sends.
                response_type=None,
                start_to_close_timeout=start_to_close_timeout,
                retry_policy=retry_policy,
                heartbeat_timeout=heartbeat_timeout,
            )
        else:
            return await self._acp_service.event_send(
                agent_id=agent_id,
                agent_name=agent_name,
                task_id=task_id,
                content=content,
                trace_id=trace_id,
                parent_span_id=parent_span_id,
            )

    async def send_message(
        self,
        task_id: str,
        content: TaskMessageContent,
        agent_id: str | None = None,
        agent_name: str | None = None,
        trace_id: str | None = None,
        parent_span_id: str | None = None,
        start_to_close_timeout: timedelta = timedelta(seconds=5),
        heartbeat_timeout: timedelta = timedelta(seconds=5),
        retry_policy: RetryPolicy = DEFAULT_RETRY_POLICY,
    ):
        """
        Send a message to a task.

        Args:
            task_id: The ID of the task to send the message to.
            content: The task message content to send to the task.
            agent_id: The ID of the agent to send the message to.
            agent_name: The name of the agent to send the message to.
            trace_id: Optional trace ID for tracing.
            parent_span_id: Optional parent span ID for tracing.

        Returns:
            The message entry.
        """
        # Same temporal-vs-direct dispatch as create_task.
        if in_temporal_workflow():
            return await ActivityHelpers.execute_activity(
                activity_name=ACPActivityName.MESSAGE_SEND,
                request=MessageSendParams(
                    agent_id=agent_id,
                    agent_name=agent_name,
                    task_id=task_id,
                    content=content,
                    trace_id=trace_id,
                    parent_span_id=parent_span_id,
                ),
                response_type=TaskMessage,
                start_to_close_timeout=start_to_close_timeout,
                retry_policy=retry_policy,
                heartbeat_timeout=heartbeat_timeout,
            )
        else:
            return await self._acp_service.message_send(
                agent_id=agent_id,
                agent_name=agent_name,
                task_id=task_id,
                content=content,
                trace_id=trace_id,
                parent_span_id=parent_span_id,
            )

    async def cancel_task(
        self,
        task_id: str | None = None,
        task_name: str | None = None,
        trace_id: str | None = None,
        parent_span_id: str | None = None,
        start_to_close_timeout: timedelta = timedelta(seconds=5),
        heartbeat_timeout: timedelta = timedelta(seconds=5),
        retry_policy: RetryPolicy = DEFAULT_RETRY_POLICY,
    ):
        """
        Cancel a task.

        Args:
            task_id: The ID of the task to cancel.
            task_name: The name of the task to cancel.
            trace_id: The trace ID for the task.
            parent_span_id: The parent span ID for the task.
            start_to_close_timeout: The start to close timeout for the task.
            heartbeat_timeout: The heartbeat timeout for the task.
            retry_policy: The retry policy for the task.

        Returns:
            The task entry.
        """
        # Same temporal-vs-direct dispatch as create_task.
        if in_temporal_workflow():
            return await ActivityHelpers.execute_activity(
                activity_name=ACPActivityName.TASK_CANCEL,
                request=TaskCancelParams(
                    task_id=task_id,
                    task_name=task_name,
                    trace_id=trace_id,
                    parent_span_id=parent_span_id,
                ),
                # No typed response is deserialized for cancellations.
                response_type=None,
                start_to_close_timeout=start_to_close_timeout,
                retry_policy=retry_policy,
                heartbeat_timeout=heartbeat_timeout,
            )
        else:
            return await self._acp_service.task_cancel(
                task_id=task_id,
                task_name=task_name,
                trace_id=trace_id,
                parent_span_id=parent_span_id,
            )
diff --git a/src/agentex/lib/adk/_modules/agent_task_tracker.py b/src/agentex/lib/adk/_modules/agent_task_tracker.py
new file mode 100644
index 000000000..44ffd5184
--- /dev/null
+++ b/src/agentex/lib/adk/_modules/agent_task_tracker.py
@@ -0,0 +1,176 @@
+from datetime import timedelta
+
+from temporalio.common import RetryPolicy
+
+from agentex import AsyncAgentex
+from agentex.lib.core.services.adk.agent_task_tracker import AgentTaskTrackerService
+from agentex.lib.core.temporal.activities.activity_helpers import ActivityHelpers
+from agentex.lib.core.temporal.activities.adk.agent_task_tracker_activities import (
+ AgentTaskTrackerActivityName,
+ GetAgentTaskTrackerByTaskAndAgentParams,
+ GetAgentTaskTrackerParams,
+ UpdateAgentTaskTrackerParams,
+)
+from agentex.lib.core.tracing.tracer import AsyncTracer
+from agentex.types.agent_task_tracker import AgentTaskTracker
+from agentex.lib.utils.logging import make_logger
+from agentex.lib.utils.temporal import in_temporal_workflow
+
+logger = make_logger(__name__)
+
+# Default retry policy for all agent task tracker operations
+DEFAULT_RETRY_POLICY = RetryPolicy(maximum_attempts=1)
+
+
class AgentTaskTrackerModule:
    """
    Module for managing agent task trackers in Agentex.
    Provides high-level async methods for retrieving, filtering, and updating agent task trackers.
    Every method transparently dispatches either through a Temporal activity (when called
    from inside a Temporal workflow) or directly through the underlying service.
    """

    def __init__(
        self,
        agent_task_tracker_service: AgentTaskTrackerService | None = None,
    ):
        # If no service is injected, build a default one from a fresh
        # AsyncAgentex client and tracer.
        if agent_task_tracker_service is None:
            agentex_client = AsyncAgentex()
            tracer = AsyncTracer(agentex_client)
            self._agent_task_tracker_service = AgentTaskTrackerService(
                agentex_client=agentex_client, tracer=tracer
            )
        else:
            self._agent_task_tracker_service = agent_task_tracker_service

    async def get(
        self,
        tracker_id: str,
        trace_id: str | None = None,
        parent_span_id: str | None = None,
        start_to_close_timeout: timedelta = timedelta(seconds=5),
        heartbeat_timeout: timedelta = timedelta(seconds=5),
        retry_policy: RetryPolicy = DEFAULT_RETRY_POLICY,
    ) -> AgentTaskTracker:
        """
        Get an agent task tracker by ID.

        Args:
            tracker_id (str): The ID of the tracker.
            trace_id (Optional[str]): The trace ID for tracing.
            parent_span_id (Optional[str]): The parent span ID for tracing.
            start_to_close_timeout (timedelta): The start to close timeout.
            heartbeat_timeout (timedelta): The heartbeat timeout.
            retry_policy (RetryPolicy): The retry policy.

        Returns:
            AgentTaskTracker: The agent task tracker.
        """
        params = GetAgentTaskTrackerParams(
            tracker_id=tracker_id,
            trace_id=trace_id,
            parent_span_id=parent_span_id,
        )
        # Inside a Temporal workflow, route through an activity so the call is
        # recorded in workflow history and stays replay-safe; otherwise call
        # the service directly.
        if in_temporal_workflow():
            return await ActivityHelpers.execute_activity(
                activity_name=AgentTaskTrackerActivityName.GET_AGENT_TASK_TRACKER,
                request=params,
                response_type=AgentTaskTracker,
                start_to_close_timeout=start_to_close_timeout,
                retry_policy=retry_policy,
                heartbeat_timeout=heartbeat_timeout,
            )
        else:
            return await self._agent_task_tracker_service.get_agent_task_tracker(
                tracker_id=tracker_id,
                trace_id=trace_id,
                parent_span_id=parent_span_id,
            )

    async def get_by_task_and_agent(
        self,
        task_id: str,
        agent_id: str,
        trace_id: str | None = None,
        parent_span_id: str | None = None,
        start_to_close_timeout: timedelta = timedelta(seconds=5),
        heartbeat_timeout: timedelta = timedelta(seconds=5),
        retry_policy: RetryPolicy = DEFAULT_RETRY_POLICY,
    ) -> AgentTaskTracker | None:
        """
        Get an agent task tracker by task ID and agent ID.

        Args:
            task_id (str): The ID of the task.
            agent_id (str): The ID of the agent.
            trace_id (Optional[str]): The trace ID for tracing.
            parent_span_id (Optional[str]): The parent span ID for tracing.
            start_to_close_timeout (timedelta): The start to close timeout.
            heartbeat_timeout (timedelta): The heartbeat timeout.
            retry_policy (RetryPolicy): The retry policy.

        Returns:
            Optional[AgentTaskTracker]: The tracker, or None when no tracker
            exists for the pair.
        """
        params = GetAgentTaskTrackerByTaskAndAgentParams(
            task_id=task_id,
            agent_id=agent_id,
            trace_id=trace_id,
            parent_span_id=parent_span_id,
        )
        # Same temporal-vs-direct dispatch as get().
        if in_temporal_workflow():
            # NOTE(review): response_type is the non-optional AgentTaskTracker even
            # though this method may return None — confirm the activity helper
            # tolerates a null activity result.
            return await ActivityHelpers.execute_activity(
                activity_name=AgentTaskTrackerActivityName.GET_AGENT_TASK_TRACKER_BY_TASK_AND_AGENT,
                request=params,
                response_type=AgentTaskTracker,
                start_to_close_timeout=start_to_close_timeout,
                retry_policy=retry_policy,
                heartbeat_timeout=heartbeat_timeout,
            )
        else:
            return await self._agent_task_tracker_service.get_by_task_and_agent(
                task_id=task_id,
                agent_id=agent_id,
                trace_id=trace_id,
                parent_span_id=parent_span_id,
            )

    async def update(
        self,
        tracker_id: str,
        last_processed_event_id: str | None = None,
        status: str | None = None,
        status_reason: str | None = None,
        trace_id: str | None = None,
        parent_span_id: str | None = None,
        start_to_close_timeout: timedelta = timedelta(seconds=5),
        heartbeat_timeout: timedelta = timedelta(seconds=5),
        retry_policy: RetryPolicy = DEFAULT_RETRY_POLICY,
    ) -> AgentTaskTracker:
        """
        Update an agent task tracker.

        Args:
            tracker_id (str): The ID of the tracker to update.
            last_processed_event_id (Optional[str]): New last-processed event ID, if changing.
            status (Optional[str]): New status value, if changing.
            status_reason (Optional[str]): Human-readable reason for the status, if changing.
            trace_id (Optional[str]): The trace ID for tracing.
            parent_span_id (Optional[str]): The parent span ID for tracing.
            start_to_close_timeout (timedelta): The start to close timeout.
            heartbeat_timeout (timedelta): The heartbeat timeout.
            retry_policy (RetryPolicy): The retry policy.

        Returns:
            AgentTaskTracker: The updated agent task tracker.
        """
        params = UpdateAgentTaskTrackerParams(
            tracker_id=tracker_id,
            last_processed_event_id=last_processed_event_id,
            status=status,
            status_reason=status_reason,
            trace_id=trace_id,
            parent_span_id=parent_span_id,
        )
        # Same temporal-vs-direct dispatch as get().
        if in_temporal_workflow():
            return await ActivityHelpers.execute_activity(
                activity_name=AgentTaskTrackerActivityName.UPDATE_AGENT_TASK_TRACKER,
                request=params,
                response_type=AgentTaskTracker,
                start_to_close_timeout=start_to_close_timeout,
                retry_policy=retry_policy,
                heartbeat_timeout=heartbeat_timeout,
            )
        else:
            return await self._agent_task_tracker_service.update_agent_task_tracker(
                tracker_id=tracker_id,
                last_processed_event_id=last_processed_event_id,
                status=status,
                status_reason=status_reason,
                trace_id=trace_id,
                parent_span_id=parent_span_id,
            )
diff --git a/src/agentex/lib/adk/_modules/agents.py b/src/agentex/lib/adk/_modules/agents.py
new file mode 100644
index 000000000..5e1bbe0e0
--- /dev/null
+++ b/src/agentex/lib/adk/_modules/agents.py
@@ -0,0 +1,77 @@
+from datetime import timedelta
+from typing import Optional
+
+from agentex.lib.core.temporal.activities.adk.agents_activities import AgentsActivityName, GetAgentParams
+from temporalio.common import RetryPolicy
+
+from agentex import AsyncAgentex
+from agentex.lib.core.services.adk.agents import AgentsService
+from agentex.lib.core.temporal.activities.activity_helpers import ActivityHelpers
+from agentex.lib.core.tracing.tracer import AsyncTracer
+from agentex.types.agent import Agent
+from agentex.lib.utils.logging import make_logger
+from agentex.lib.utils.temporal import in_temporal_workflow
+
+logger = make_logger(__name__)
+
+DEFAULT_RETRY_POLICY = RetryPolicy(maximum_attempts=1)
+
+
+class AgentsModule:
+ """
+ Module for managing agents in Agentex.
+    Provides high-level async methods for retrieving agents.
+ """
+
+ def __init__(
+ self,
+ agents_service: Optional[AgentsService] = None,
+ ):
+ if agents_service is None:
+ agentex_client = AsyncAgentex()
+ tracer = AsyncTracer(agentex_client)
+ self._agents_service = AgentsService(agentex_client=agentex_client, tracer=tracer)
+ else:
+ self._agents_service = agents_service
+
+ async def get(
+ self,
+ *,
+ agent_id: Optional[str] = None,
+ agent_name: Optional[str] = None,
+ trace_id: Optional[str] = None,
+ parent_span_id: Optional[str] = None,
+ start_to_close_timeout: timedelta = timedelta(seconds=5),
+ heartbeat_timeout: timedelta = timedelta(seconds=5),
+ retry_policy: RetryPolicy = DEFAULT_RETRY_POLICY,
+ ) -> Agent:
+ """
+ Get an agent by ID or name.
+ Args:
+ agent_id: The ID of the agent to retrieve.
+ agent_name: The name of the agent to retrieve.
+ Returns:
+ The agent entry.
+ """
+ params = GetAgentParams(
+ agent_id=agent_id,
+ agent_name=agent_name,
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ )
+ if in_temporal_workflow():
+ return await ActivityHelpers.execute_activity(
+ activity_name=AgentsActivityName.GET_AGENT,
+ request=params,
+ response_type=Agent,
+ start_to_close_timeout=start_to_close_timeout,
+ retry_policy=retry_policy,
+ heartbeat_timeout=heartbeat_timeout,
+ )
+ else:
+ return await self._agents_service.get_agent(
+ agent_id=agent_id,
+ agent_name=agent_name,
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ )
diff --git a/src/agentex/lib/adk/_modules/events.py b/src/agentex/lib/adk/_modules/events.py
new file mode 100644
index 000000000..529db4c3c
--- /dev/null
+++ b/src/agentex/lib/adk/_modules/events.py
@@ -0,0 +1,141 @@
+from datetime import timedelta
+
+from temporalio.common import RetryPolicy
+
+from agentex import AsyncAgentex
+from agentex.lib.core.services.adk.events import EventsService
+from agentex.lib.core.temporal.activities.activity_helpers import ActivityHelpers
+from agentex.lib.core.temporal.activities.adk.events_activities import (
+ EventsActivityName,
+ GetEventParams,
+ ListEventsParams,
+)
+from agentex.lib.core.tracing.tracer import AsyncTracer
+from agentex.types.event import Event
+from agentex.lib.utils.logging import make_logger
+from agentex.lib.utils.temporal import in_temporal_workflow
+
+logger = make_logger(__name__)
+
+# Default retry policy for all events operations
+DEFAULT_RETRY_POLICY = RetryPolicy(maximum_attempts=1)
+
+
+class EventsModule:
+ """
+ Module for managing events in Agentex.
+ Provides high-level async methods for retrieving and listing events.
+ """
+
+ def __init__(
+ self,
+ events_service: EventsService | None = None,
+ ):
+ if events_service is None:
+ agentex_client = AsyncAgentex()
+ tracer = AsyncTracer(agentex_client)
+ self._events_service = EventsService(
+ agentex_client=agentex_client, tracer=tracer
+ )
+ else:
+ self._events_service = events_service
+
+ async def get(
+ self,
+ event_id: str,
+ trace_id: str | None = None,
+ parent_span_id: str | None = None,
+ start_to_close_timeout: timedelta = timedelta(seconds=5),
+ heartbeat_timeout: timedelta = timedelta(seconds=5),
+ retry_policy: RetryPolicy = DEFAULT_RETRY_POLICY,
+ ) -> Event | None:
+ """
+ Get an event by ID.
+
+ Args:
+ event_id (str): The ID of the event.
+ trace_id (Optional[str]): The trace ID for tracing.
+ parent_span_id (Optional[str]): The parent span ID for tracing.
+ start_to_close_timeout (timedelta): The start to close timeout.
+ heartbeat_timeout (timedelta): The heartbeat timeout.
+ retry_policy (RetryPolicy): The retry policy.
+
+ Returns:
+ Optional[Event]: The event if found, None otherwise.
+ """
+ params = GetEventParams(
+ event_id=event_id,
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ )
+ if in_temporal_workflow():
+ return await ActivityHelpers.execute_activity(
+ activity_name=EventsActivityName.GET_EVENT,
+ request=params,
+ response_type=Event,
+ start_to_close_timeout=start_to_close_timeout,
+ retry_policy=retry_policy,
+ heartbeat_timeout=heartbeat_timeout,
+ )
+ else:
+ return await self._events_service.get_event(
+ event_id=event_id,
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ )
+
+ async def list_events(
+ self,
+ task_id: str,
+ agent_id: str,
+ last_processed_event_id: str | None = None,
+ limit: int | None = None,
+ trace_id: str | None = None,
+ parent_span_id: str | None = None,
+ start_to_close_timeout: timedelta = timedelta(seconds=5),
+ heartbeat_timeout: timedelta = timedelta(seconds=5),
+ retry_policy: RetryPolicy = DEFAULT_RETRY_POLICY,
+ ) -> list[Event]:
+ """
+ List events for a specific task and agent.
+
+ Args:
+ task_id (str): The ID of the task.
+ agent_id (str): The ID of the agent.
+ last_processed_event_id (Optional[str]): Optional event ID to get events after this ID.
+ limit (Optional[int]): Optional limit on number of results.
+ trace_id (Optional[str]): The trace ID for tracing.
+ parent_span_id (Optional[str]): The parent span ID for tracing.
+ start_to_close_timeout (timedelta): The start to close timeout.
+ heartbeat_timeout (timedelta): The heartbeat timeout.
+ retry_policy (RetryPolicy): The retry policy.
+
+ Returns:
+ List[Event]: List of events ordered by sequence_id.
+ """
+ params = ListEventsParams(
+ task_id=task_id,
+ agent_id=agent_id,
+ last_processed_event_id=last_processed_event_id,
+ limit=limit,
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ )
+ if in_temporal_workflow():
+ return await ActivityHelpers.execute_activity(
+ activity_name=EventsActivityName.LIST_EVENTS,
+ request=params,
+ response_type=list[Event],
+ start_to_close_timeout=start_to_close_timeout,
+ retry_policy=retry_policy,
+ heartbeat_timeout=heartbeat_timeout,
+ )
+ else:
+ return await self._events_service.list_events(
+ task_id=task_id,
+ agent_id=agent_id,
+ last_processed_event_id=last_processed_event_id,
+ limit=limit,
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ )
diff --git a/src/agentex/lib/adk/_modules/messages.py b/src/agentex/lib/adk/_modules/messages.py
new file mode 100644
index 000000000..24d7f0f07
--- /dev/null
+++ b/src/agentex/lib/adk/_modules/messages.py
@@ -0,0 +1,285 @@
+from datetime import timedelta
+
+from temporalio.common import RetryPolicy
+
+from agentex import AsyncAgentex
+from agentex.lib.core.adapters.streams.adapter_redis import RedisEventStreamRepository
+from agentex.lib.core.services.adk.messages import MessagesService
+from agentex.lib.core.services.adk.streaming import StreamingService
+from agentex.lib.core.temporal.activities.activity_helpers import ActivityHelpers
+from agentex.lib.core.temporal.activities.adk.messages_activities import (
+ CreateMessageParams,
+ CreateMessagesBatchParams,
+ ListMessagesParams,
+ MessagesActivityName,
+ UpdateMessageParams,
+ UpdateMessagesBatchParams,
+)
+from agentex.lib.core.tracing.tracer import AsyncTracer
+from agentex.types.task_message import TaskMessage, TaskMessageContent
+from agentex.lib.utils.logging import make_logger
+from agentex.lib.utils.temporal import in_temporal_workflow
+
+logger = make_logger(__name__)
+
+# Default retry policy for all message operations
+DEFAULT_RETRY_POLICY = RetryPolicy(maximum_attempts=1)
+
+
+class MessagesModule:
+ """
+ Module for managing task messages in Agentex.
+    Provides high-level async methods for creating, updating, and listing messages.
+ """
+
+ def __init__(
+ self,
+ messages_service: MessagesService | None = None,
+ ):
+ if messages_service is None:
+ agentex_client = AsyncAgentex()
+ stream_repository = RedisEventStreamRepository()
+ streaming_service = StreamingService(
+ agentex_client=agentex_client,
+ stream_repository=stream_repository,
+ )
+ tracer = AsyncTracer(agentex_client)
+ self._messages_service = MessagesService(
+ agentex_client=agentex_client,
+ streaming_service=streaming_service,
+ tracer=tracer,
+ )
+ else:
+ self._messages_service = messages_service
+
+ async def create(
+ self,
+ task_id: str,
+ content: TaskMessageContent,
+ emit_updates: bool = True,
+ trace_id: str | None = None,
+ parent_span_id: str | None = None,
+ start_to_close_timeout: timedelta = timedelta(seconds=5),
+ heartbeat_timeout: timedelta = timedelta(seconds=5),
+ retry_policy: RetryPolicy = DEFAULT_RETRY_POLICY,
+ ) -> TaskMessage:
+ """
+ Create a new message for a task.
+
+ Args:
+ task_id (str): The ID of the task.
+            content (TaskMessageContent): The content of the message to create.
+ trace_id (Optional[str]): The trace ID for tracing.
+ parent_span_id (Optional[str]): The parent span ID for tracing.
+ start_to_close_timeout (timedelta): The start to close timeout.
+ heartbeat_timeout (timedelta): The heartbeat timeout.
+ retry_policy (RetryPolicy): The retry policy.
+
+ Returns:
+            TaskMessage: The created message.
+ """
+ params = CreateMessageParams(
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ task_id=task_id,
+ content=content,
+ emit_updates=emit_updates,
+ )
+ if in_temporal_workflow():
+ return await ActivityHelpers.execute_activity(
+ activity_name=MessagesActivityName.CREATE_MESSAGE,
+ request=params,
+ response_type=TaskMessage,
+ start_to_close_timeout=start_to_close_timeout,
+ retry_policy=retry_policy,
+ heartbeat_timeout=heartbeat_timeout,
+ )
+ else:
+ return await self._messages_service.create_message(
+ task_id=task_id,
+ content=content,
+ emit_updates=emit_updates,
+ )
+
+ async def update(
+ self,
+ task_id: str,
+ message_id: str,
+ content: TaskMessageContent,
+ trace_id: str | None = None,
+ parent_span_id: str | None = None,
+ start_to_close_timeout: timedelta = timedelta(seconds=5),
+ heartbeat_timeout: timedelta = timedelta(seconds=5),
+ retry_policy: RetryPolicy = DEFAULT_RETRY_POLICY,
+ ) -> TaskMessage:
+ """
+ Update a message for a task.
+
+ Args:
+ task_id (str): The ID of the task.
+ message_id (str): The ID of the message.
+            content (TaskMessageContent): The new content for the message.
+ start_to_close_timeout (timedelta): The start to close timeout.
+ heartbeat_timeout (timedelta): The heartbeat timeout.
+ retry_policy (RetryPolicy): The retry policy.
+
+ Returns:
+            TaskMessage: The updated message.
+ """
+ params = UpdateMessageParams(
+ task_id=task_id,
+ message_id=message_id,
+ content=content,
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ )
+ if in_temporal_workflow():
+ return await ActivityHelpers.execute_activity(
+ activity_name=MessagesActivityName.UPDATE_MESSAGE,
+ request=params,
+ response_type=TaskMessage,
+ start_to_close_timeout=start_to_close_timeout,
+ retry_policy=retry_policy,
+ heartbeat_timeout=heartbeat_timeout,
+ )
+ else:
+ return await self._messages_service.update_message(
+ task_id=task_id,
+ message_id=message_id,
+ content=content,
+ )
+
+ async def create_batch(
+ self,
+ task_id: str,
+ contents: list[TaskMessageContent],
+ emit_updates: bool = True,
+ trace_id: str | None = None,
+ parent_span_id: str | None = None,
+ start_to_close_timeout: timedelta = timedelta(seconds=5),
+ heartbeat_timeout: timedelta = timedelta(seconds=5),
+ retry_policy: RetryPolicy = DEFAULT_RETRY_POLICY,
+ ) -> list[TaskMessage]:
+ """
+ Create a batch of messages for a task.
+
+ Args:
+ task_id (str): The ID of the task.
+            contents (List[TaskMessageContent]): The contents of the messages to create.
+ start_to_close_timeout (timedelta): The start to close timeout.
+ heartbeat_timeout (timedelta): The heartbeat timeout.
+ retry_policy (RetryPolicy): The retry policy.
+
+ Returns:
+            List[TaskMessage]: The created messages.
+ """
+ params = CreateMessagesBatchParams(
+ task_id=task_id,
+ contents=contents,
+ emit_updates=emit_updates,
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ )
+ if in_temporal_workflow():
+ return await ActivityHelpers.execute_activity(
+ activity_name=MessagesActivityName.CREATE_MESSAGES_BATCH,
+ request=params,
+ response_type=list[TaskMessage],
+ start_to_close_timeout=start_to_close_timeout,
+ retry_policy=retry_policy,
+ heartbeat_timeout=heartbeat_timeout,
+ )
+ else:
+ return await self._messages_service.create_messages_batch(
+ task_id=task_id,
+ contents=contents,
+ emit_updates=emit_updates,
+ )
+
+ async def update_batch(
+ self,
+ task_id: str,
+ updates: dict[str, TaskMessageContent],
+ trace_id: str | None = None,
+ parent_span_id: str | None = None,
+ start_to_close_timeout: timedelta = timedelta(seconds=5),
+ heartbeat_timeout: timedelta = timedelta(seconds=5),
+ retry_policy: RetryPolicy = DEFAULT_RETRY_POLICY,
+ ) -> list[TaskMessage]:
+ """
+ Update a batch of messages for a task.
+
+ Args:
+ task_id (str): The ID of the task.
+            updates (Dict[str, TaskMessageContent]): Mapping of message ID to the new content to apply.
+ start_to_close_timeout (timedelta): The start to close timeout.
+ heartbeat_timeout (timedelta): The heartbeat timeout.
+ retry_policy (RetryPolicy): The retry policy.
+
+ Returns:
+            List[TaskMessage]: The updated messages.
+ """
+ params = UpdateMessagesBatchParams(
+ task_id=task_id,
+ updates=updates,
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ )
+ if in_temporal_workflow():
+ return await ActivityHelpers.execute_activity(
+ activity_name=MessagesActivityName.UPDATE_MESSAGES_BATCH,
+ request=params,
+ response_type=list[TaskMessage],
+ start_to_close_timeout=start_to_close_timeout,
+ retry_policy=retry_policy,
+ heartbeat_timeout=heartbeat_timeout,
+ )
+ else:
+ return await self._messages_service.update_messages_batch(
+ task_id=task_id,
+ updates=updates,
+ )
+
+ async def list(
+ self,
+ task_id: str,
+ limit: int | None = None,
+ trace_id: str | None = None,
+ parent_span_id: str | None = None,
+ start_to_close_timeout: timedelta = timedelta(seconds=5),
+ heartbeat_timeout: timedelta = timedelta(seconds=5),
+ retry_policy: RetryPolicy = DEFAULT_RETRY_POLICY,
+ ) -> list[TaskMessage]:
+ """
+ List messages for a task.
+
+ Args:
+ task_id (str): The ID of the task.
+ limit (Optional[int]): The maximum number of messages to return.
+ start_to_close_timeout (timedelta): The start to close timeout.
+ heartbeat_timeout (timedelta): The heartbeat timeout.
+ retry_policy (RetryPolicy): The retry policy.
+
+ Returns:
+            List[TaskMessage]: The list of messages.
+ """
+ params = ListMessagesParams(
+ task_id=task_id,
+ limit=limit,
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ )
+ if in_temporal_workflow():
+ return await ActivityHelpers.execute_activity(
+ activity_name=MessagesActivityName.LIST_MESSAGES,
+ request=params,
+ response_type=list[TaskMessage],
+ start_to_close_timeout=start_to_close_timeout,
+ retry_policy=retry_policy,
+ heartbeat_timeout=heartbeat_timeout,
+ )
+ else:
+ return await self._messages_service.list_messages(
+ task_id=task_id,
+ limit=limit,
+ )
diff --git a/src/agentex/lib/adk/_modules/state.py b/src/agentex/lib/adk/_modules/state.py
new file mode 100644
index 000000000..c13a6b59c
--- /dev/null
+++ b/src/agentex/lib/adk/_modules/state.py
@@ -0,0 +1,291 @@
+from datetime import timedelta
+from typing import Any
+
+from pydantic import BaseModel
+from temporalio.common import RetryPolicy
+
+from agentex import AsyncAgentex
+from agentex.lib.core.services.adk.state import StateService
+from agentex.lib.core.temporal.activities.activity_helpers import ActivityHelpers
+from agentex.lib.core.temporal.activities.adk.state_activities import (
+ CreateStateParams,
+ DeleteStateParams,
+ GetStateParams,
+ StateActivityName,
+ UpdateStateParams,
+)
+from agentex.lib.core.tracing.tracer import AsyncTracer
+from agentex.types.state import State
+from agentex.lib.utils.logging import make_logger
+from agentex.lib.utils.temporal import in_temporal_workflow
+
+logger = make_logger(__name__)
+
+# Default retry policy for all state operations
+DEFAULT_RETRY_POLICY = RetryPolicy(maximum_attempts=1)
+
+
+class StateModule:
+ """
+ Module for managing task state in Agentex.
+ Provides high-level async methods for creating, retrieving, updating, and deleting state.
+ """
+
+ def __init__(
+ self,
+ state_service: StateService | None = None,
+ ):
+ if state_service is None:
+ agentex_client = AsyncAgentex()
+ tracer = AsyncTracer(agentex_client)
+ self._state_service = StateService(
+ agentex_client=agentex_client, tracer=tracer
+ )
+ else:
+ self._state_service = state_service
+
+ async def create(
+ self,
+ task_id: str,
+ agent_id: str,
+ state: dict[str, Any] | BaseModel,
+ trace_id: str | None = None,
+ parent_span_id: str | None = None,
+ start_to_close_timeout: timedelta = timedelta(seconds=5),
+ heartbeat_timeout: timedelta = timedelta(seconds=5),
+ retry_policy: RetryPolicy = DEFAULT_RETRY_POLICY,
+ ) -> State:
+ """
+ Create a new state for a task and agent.
+
+ Args:
+ task_id (str): The ID of the task.
+ agent_id (str): The ID of the agent.
+ state (Dict[str, Any]): The state to create.
+ trace_id (Optional[str]): The trace ID for tracing.
+ parent_span_id (Optional[str]): The parent span ID for tracing.
+ start_to_close_timeout (timedelta): The start to close timeout.
+ heartbeat_timeout (timedelta): The heartbeat timeout.
+ retry_policy (RetryPolicy): The retry policy.
+
+ Returns:
+ State: The created state.
+ """
+ state_dict = state.model_dump() if isinstance(state, BaseModel) else state
+ params = CreateStateParams(
+ task_id=task_id,
+ agent_id=agent_id,
+ state=state_dict,
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ )
+ if in_temporal_workflow():
+ return await ActivityHelpers.execute_activity(
+ activity_name=StateActivityName.CREATE_STATE,
+ request=params,
+ response_type=State,
+ start_to_close_timeout=start_to_close_timeout,
+ retry_policy=retry_policy,
+ heartbeat_timeout=heartbeat_timeout,
+ )
+ else:
+ return await self._state_service.create_state(
+ task_id=task_id,
+ agent_id=agent_id,
+ state=state_dict,
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ )
+
+ async def get(
+ self,
+ state_id: str,
+ trace_id: str | None = None,
+ parent_span_id: str | None = None,
+ start_to_close_timeout: timedelta = timedelta(seconds=5),
+ heartbeat_timeout: timedelta = timedelta(seconds=5),
+ retry_policy: RetryPolicy = DEFAULT_RETRY_POLICY,
+ ) -> State | None:
+ """
+ Get a state by ID.
+
+ Args:
+ state_id (str): The ID of the state.
+ trace_id (Optional[str]): The trace ID for tracing.
+ parent_span_id (Optional[str]): The parent span ID for tracing.
+ start_to_close_timeout (timedelta): The start to close timeout.
+ heartbeat_timeout (timedelta): The heartbeat timeout.
+ retry_policy (RetryPolicy): The retry policy.
+
+ Returns:
+ Optional[State]: The state if found, None otherwise.
+ """
+ params = GetStateParams(
+ state_id=state_id,
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ )
+ if in_temporal_workflow():
+ return await ActivityHelpers.execute_activity(
+ activity_name=StateActivityName.GET_STATE,
+ request=params,
+ response_type=State,
+ start_to_close_timeout=start_to_close_timeout,
+ retry_policy=retry_policy,
+ heartbeat_timeout=heartbeat_timeout,
+ )
+ else:
+ return await self._state_service.get_state(
+ state_id=state_id,
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ )
+
+ async def get_by_task_and_agent(
+ self,
+ task_id: str,
+ agent_id: str,
+ trace_id: str | None = None,
+ parent_span_id: str | None = None,
+ start_to_close_timeout: timedelta = timedelta(seconds=5),
+ heartbeat_timeout: timedelta = timedelta(seconds=5),
+ retry_policy: RetryPolicy = DEFAULT_RETRY_POLICY,
+ ) -> State | None:
+ """
+ Get a state by task and agent ID. A state is uniquely identified by task and the agent that created it.
+
+ Args:
+ task_id (str): The ID of the task.
+ agent_id (str): The ID of the agent.
+ trace_id (Optional[str]): The trace ID for tracing.
+ parent_span_id (Optional[str]): The parent span ID for tracing.
+ start_to_close_timeout (timedelta): The start to close timeout.
+ heartbeat_timeout (timedelta): The heartbeat timeout.
+ retry_policy (RetryPolicy): The retry policy.
+
+ Returns:
+ Optional[State]: The state if found, None otherwise.
+ """
+ params = GetStateParams(
+ task_id=task_id,
+ agent_id=agent_id,
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ )
+ if in_temporal_workflow():
+ return await ActivityHelpers.execute_activity(
+ activity_name=StateActivityName.GET_STATE,
+ request=params,
+ response_type=State,
+ start_to_close_timeout=start_to_close_timeout,
+ retry_policy=retry_policy,
+ heartbeat_timeout=heartbeat_timeout,
+ )
+ else:
+ return await self._state_service.get_state(
+ task_id=task_id,
+ agent_id=agent_id,
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ )
+
+ async def update(
+ self,
+ state_id: str,
+ task_id: str,
+ agent_id: str,
+ state: dict[str, Any] | BaseModel,
+ trace_id: str | None = None,
+ parent_span_id: str | None = None,
+ start_to_close_timeout: timedelta = timedelta(seconds=5),
+ heartbeat_timeout: timedelta = timedelta(seconds=5),
+ retry_policy: RetryPolicy = DEFAULT_RETRY_POLICY,
+ ) -> State:
+ """
+ Update a state by ID.
+
+ Args:
+ state_id (str): The ID of the state.
+ task_id (str): The ID of the task.
+ agent_id (str): The ID of the agent.
+ state (Dict[str, Any]): The state to update.
+ trace_id (Optional[str]): The trace ID for tracing.
+ parent_span_id (Optional[str]): The parent span ID for tracing.
+ start_to_close_timeout (timedelta): The start to close timeout.
+ heartbeat_timeout (timedelta): The heartbeat timeout.
+ retry_policy (RetryPolicy): The retry policy.
+
+ Returns:
+ State: The updated state.
+ """
+ state_dict = state.model_dump() if isinstance(state, BaseModel) else state
+ params = UpdateStateParams(
+ state_id=state_id,
+ task_id=task_id,
+ agent_id=agent_id,
+ state=state_dict,
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ )
+ if in_temporal_workflow():
+ return await ActivityHelpers.execute_activity(
+ activity_name=StateActivityName.UPDATE_STATE,
+ request=params,
+ response_type=State,
+ start_to_close_timeout=start_to_close_timeout,
+ retry_policy=retry_policy,
+ heartbeat_timeout=heartbeat_timeout,
+ )
+ else:
+ return await self._state_service.update_state(
+ state_id=state_id,
+ task_id=task_id,
+ agent_id=agent_id,
+ state=state_dict,
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ )
+
+ async def delete(
+ self,
+ state_id: str,
+ trace_id: str | None = None,
+ parent_span_id: str | None = None,
+ start_to_close_timeout: timedelta = timedelta(seconds=5),
+ heartbeat_timeout: timedelta = timedelta(seconds=5),
+ retry_policy: RetryPolicy = DEFAULT_RETRY_POLICY,
+ ) -> State:
+ """
+ Delete a state by ID.
+
+ Args:
+ state_id (str): The ID of the state.
+ trace_id (Optional[str]): The trace ID for tracing.
+ parent_span_id (Optional[str]): The parent span ID for tracing.
+ start_to_close_timeout (timedelta): The start to close timeout.
+ heartbeat_timeout (timedelta): The heartbeat timeout.
+ retry_policy (RetryPolicy): The retry policy.
+
+ Returns:
+ State: The deleted state.
+ """
+ params = DeleteStateParams(
+ state_id=state_id,
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ )
+ if in_temporal_workflow():
+ return await ActivityHelpers.execute_activity(
+ activity_name=StateActivityName.DELETE_STATE,
+ request=params,
+ response_type=State,
+ start_to_close_timeout=start_to_close_timeout,
+ retry_policy=retry_policy,
+ heartbeat_timeout=heartbeat_timeout,
+ )
+ else:
+ return await self._state_service.delete_state(
+ state_id=state_id,
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ )
diff --git a/src/agentex/lib/adk/_modules/streaming.py b/src/agentex/lib/adk/_modules/streaming.py
new file mode 100644
index 000000000..589384ed4
--- /dev/null
+++ b/src/agentex/lib/adk/_modules/streaming.py
@@ -0,0 +1,75 @@
+from temporalio.common import RetryPolicy
+
+from agentex import AsyncAgentex
+from agentex.lib.core.adapters.streams.adapter_redis import RedisEventStreamRepository
+from agentex.lib.core.services.adk.streaming import (
+ StreamingService,
+ StreamingTaskMessageContext,
+)
+from agentex.types.task_message_content import TaskMessageContent
+from agentex.lib.utils.logging import make_logger
+from agentex.lib.utils.temporal import in_temporal_workflow
+
+logger = make_logger(__name__)
+
+DEFAULT_RETRY_POLICY = RetryPolicy(maximum_attempts=1)
+
+
+class StreamingModule:
+ """
+ Module for streaming content to clients in Agentex.
+
+ This interface wraps around the StreamingService and provides a high-level API
+ for streaming events to clients, supporting both synchronous and asynchronous
+ (Temporal workflow) contexts.
+ """
+
+ def __init__(self, streaming_service: StreamingService | None = None):
+ """
+ Initialize the streaming interface.
+
+ Args:
+ streaming_service (Optional[StreamingService]): Optional StreamingService instance. If not provided,
+ a new service will be created with default parameters.
+ """
+ if streaming_service is None:
+ stream_repository = RedisEventStreamRepository()
+ agentex_client = AsyncAgentex()
+ self._streaming_service = StreamingService(
+ agentex_client=agentex_client,
+ stream_repository=stream_repository,
+ )
+ else:
+ self._streaming_service = streaming_service
+
+ def streaming_task_message_context(
+ self,
+ task_id: str,
+ initial_content: TaskMessageContent,
+ ) -> StreamingTaskMessageContext:
+ """
+ Create a streaming context for managing TaskMessage lifecycle.
+
+ This is a context manager that automatically creates a TaskMessage, sends START event,
+ and sends DONE event when the context exits. Perfect for simple streaming scenarios.
+
+ Args:
+ task_id: The ID of the task
+ initial_content: The initial content for the TaskMessage
+                (message creation and updates are handled by the underlying StreamingService)
+
+ Returns:
+ StreamingTaskMessageContext: Context manager for streaming operations
+ """
+ # Note: We don't support Temporal activities for streaming context methods yet
+ # since they involve complex state management across multiple activity calls
+ if in_temporal_workflow():
+ logger.warning(
+ "Streaming context methods are not yet supported in Temporal workflows. "
+ "You should wrap the entire streaming context in an activity. All nondeterministic network calls should be wrapped in an activity and generators cannot operate across activities and workflows."
+ )
+
+ return self._streaming_service.streaming_task_message_context(
+ task_id=task_id,
+ initial_content=initial_content,
+ )
diff --git a/src/agentex/lib/adk/_modules/tasks.py b/src/agentex/lib/adk/_modules/tasks.py
new file mode 100644
index 000000000..8b8f949a7
--- /dev/null
+++ b/src/agentex/lib/adk/_modules/tasks.py
@@ -0,0 +1,124 @@
+from datetime import timedelta
+
+from temporalio.common import RetryPolicy
+
+from agentex import AsyncAgentex
+from agentex.lib.core.services.adk.tasks import TasksService
+from agentex.lib.core.temporal.activities.activity_helpers import ActivityHelpers
+from agentex.lib.core.temporal.activities.adk.tasks_activities import (
+ DeleteTaskParams,
+ GetTaskParams,
+ TasksActivityName,
+)
+from agentex.lib.core.tracing.tracer import AsyncTracer
+from agentex.types.task import Task
+from agentex.lib.utils.logging import make_logger
+from agentex.lib.utils.temporal import in_temporal_workflow
+
+logger = make_logger(__name__)
+
+DEFAULT_RETRY_POLICY = RetryPolicy(maximum_attempts=1)
+
+
+class TasksModule:
+ """
+ Module for managing tasks in Agentex.
+    Provides high-level async methods for retrieving and deleting tasks.
+ """
+
+ def __init__(
+ self,
+ tasks_service: TasksService | None = None,
+ ):
+ if tasks_service is None:
+ agentex_client = AsyncAgentex()
+ tracer = AsyncTracer(agentex_client)
+ self._tasks_service = TasksService(
+ agentex_client=agentex_client, tracer=tracer
+ )
+ else:
+ self._tasks_service = tasks_service
+
+ async def get(
+ self,
+ *,
+ task_id: str | None = None,
+ task_name: str | None = None,
+ trace_id: str | None = None,
+ parent_span_id: str | None = None,
+ start_to_close_timeout: timedelta = timedelta(seconds=5),
+ heartbeat_timeout: timedelta = timedelta(seconds=5),
+ retry_policy: RetryPolicy = DEFAULT_RETRY_POLICY,
+ ) -> Task:
+ """
+ Get a task by ID or name.
+ Args:
+ task_id: The ID of the task to retrieve.
+ task_name: The name of the task to retrieve.
+ Returns:
+ The task entry.
+ """
+ params = GetTaskParams(
+ task_id=task_id,
+ task_name=task_name,
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ )
+ if in_temporal_workflow():
+ return await ActivityHelpers.execute_activity(
+ activity_name=TasksActivityName.GET_TASK,
+ request=params,
+ response_type=Task,
+ start_to_close_timeout=start_to_close_timeout,
+ retry_policy=retry_policy,
+ heartbeat_timeout=heartbeat_timeout,
+ )
+ else:
+ return await self._tasks_service.get_task(
+ task_id=task_id,
+ task_name=task_name,
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ )
+
+ async def delete(
+ self,
+ *,
+ task_id: str | None = None,
+ task_name: str | None = None,
+ trace_id: str | None = None,
+ parent_span_id: str | None = None,
+ start_to_close_timeout: timedelta = timedelta(seconds=5),
+ heartbeat_timeout: timedelta = timedelta(seconds=5),
+ retry_policy: RetryPolicy = DEFAULT_RETRY_POLICY,
+ ) -> Task:
+ """
+ Delete a task by ID or name.
+ Args:
+ task_id: The ID of the task to delete.
+ task_name: The name of the task to delete.
+ Returns:
+ The deleted task entry.
+ """
+ params = DeleteTaskParams(
+ task_id=task_id,
+ task_name=task_name,
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ )
+ if in_temporal_workflow():
+ return await ActivityHelpers.execute_activity(
+ activity_name=TasksActivityName.DELETE_TASK,
+ request=params,
+ response_type=Task,
+ start_to_close_timeout=start_to_close_timeout,
+ retry_policy=retry_policy,
+ heartbeat_timeout=heartbeat_timeout,
+ )
+ else:
+ return await self._tasks_service.delete_task(
+ task_id=task_id,
+ task_name=task_name,
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ )
diff --git a/src/agentex/lib/adk/_modules/tracing.py b/src/agentex/lib/adk/_modules/tracing.py
new file mode 100644
index 000000000..138f25c93
--- /dev/null
+++ b/src/agentex/lib/adk/_modules/tracing.py
@@ -0,0 +1,194 @@
+from collections.abc import AsyncGenerator
+from contextlib import asynccontextmanager
+from datetime import timedelta
+from typing import Any
+
+from temporalio.common import RetryPolicy
+
+from agentex import AsyncAgentex
+from agentex.lib.core.services.adk.tracing import TracingService
+from agentex.lib.core.temporal.activities.activity_helpers import ActivityHelpers
+from agentex.lib.core.temporal.activities.adk.tracing_activities import (
+ EndSpanParams,
+ StartSpanParams,
+ TracingActivityName,
+)
+from agentex.lib.core.tracing.tracer import AsyncTracer
+from agentex.types.span import Span
+from agentex.lib.utils.logging import make_logger
+from agentex.lib.utils.model_utils import BaseModel
+from agentex.lib.utils.temporal import in_temporal_workflow
+
+logger = make_logger(__name__)
+
+DEFAULT_RETRY_POLICY = RetryPolicy(maximum_attempts=1)
+
+
+class TracingModule:
+ """
+ Module for managing tracing and span operations in Agentex.
+ Provides high-level async methods for starting, ending, and managing spans for distributed tracing.
+ """
+
+ def __init__(self, tracing_service: TracingService | None = None):
+ """
+ Initialize the tracing interface.
+
+ Args:
+            tracing_service (Optional[TracingService]): Optional pre-configured tracing service. If None, will be auto-initialized.
+ """
+ if tracing_service is None:
+ agentex_client = AsyncAgentex()
+ tracer = AsyncTracer(agentex_client)
+ self._tracing_service = TracingService(tracer=tracer)
+ else:
+ self._tracing_service = tracing_service
+
+ @asynccontextmanager
+ async def span(
+ self,
+ trace_id: str,
+ name: str,
+ input: list[Any] | dict[str, Any] | BaseModel | None = None,
+ data: list[Any] | dict[str, Any] | BaseModel | None = None,
+ parent_id: str | None = None,
+ start_to_close_timeout: timedelta = timedelta(seconds=5),
+ heartbeat_timeout: timedelta = timedelta(seconds=5),
+ retry_policy: RetryPolicy = DEFAULT_RETRY_POLICY,
+ ) -> AsyncGenerator[Span | None, None]:
+ """
+ Async context manager for creating and automatically closing a span.
+ Yields the started span object. The span is automatically ended when the context exits.
+
+ If trace_id is falsy, acts as a no-op context manager.
+
+ Args:
+ trace_id (str): The trace ID for the span.
+ name (str): The name of the span.
+ input (Union[List, Dict, BaseModel]): The input for the span.
+ parent_id (Optional[str]): The parent span ID for the span.
+ data (Optional[Union[List, Dict, BaseModel]]): The data for the span.
+ start_to_close_timeout (timedelta): The start to close timeout for the span.
+ heartbeat_timeout (timedelta): The heartbeat timeout for the span.
+ retry_policy (RetryPolicy): The retry policy for the span.
+
+ Returns:
+ AsyncGenerator[Optional[Span], None]: An async generator that yields the started span object.
+ """
+ if not trace_id:
+ yield None
+ return
+
+ span: Span | None = await self.start_span(
+ trace_id=trace_id,
+ name=name,
+ input=input,
+ parent_id=parent_id,
+ data=data,
+ start_to_close_timeout=start_to_close_timeout,
+ heartbeat_timeout=heartbeat_timeout,
+ retry_policy=retry_policy,
+ )
+ try:
+ yield span
+ finally:
+ if span:
+ await self.end_span(
+ trace_id=trace_id,
+ span=span,
+ start_to_close_timeout=start_to_close_timeout,
+ heartbeat_timeout=heartbeat_timeout,
+ retry_policy=retry_policy,
+ )
+
+ async def start_span(
+ self,
+ trace_id: str,
+ name: str,
+ input: list[Any] | dict[str, Any] | BaseModel | None = None,
+ parent_id: str | None = None,
+ data: list[Any] | dict[str, Any] | BaseModel | None = None,
+ start_to_close_timeout: timedelta = timedelta(seconds=5),
+ heartbeat_timeout: timedelta = timedelta(seconds=1),
+ retry_policy: RetryPolicy = DEFAULT_RETRY_POLICY,
+ ) -> Span | None:
+ """
+ Start a new span in the trace.
+
+ Args:
+ trace_id (str): The trace ID for the span.
+ name (str): The name of the span.
+ input (Union[List, Dict, BaseModel]): The input for the span.
+ parent_id (Optional[str]): The parent span ID for the span.
+ data (Optional[Union[List, Dict, BaseModel]]): The data for the span.
+ start_to_close_timeout (timedelta): The start to close timeout for the span.
+ heartbeat_timeout (timedelta): The heartbeat timeout for the span.
+ retry_policy (RetryPolicy): The retry policy for the span.
+
+ Returns:
+ Span: The started span object.
+ """
+ params = StartSpanParams(
+ trace_id=trace_id,
+ parent_id=parent_id,
+ name=name,
+ input=input,
+ data=data,
+ )
+ if in_temporal_workflow():
+ return await ActivityHelpers.execute_activity(
+ activity_name=TracingActivityName.START_SPAN,
+ request=params,
+ response_type=Span,
+ start_to_close_timeout=start_to_close_timeout,
+ retry_policy=retry_policy,
+ heartbeat_timeout=heartbeat_timeout,
+ )
+ else:
+ return await self._tracing_service.start_span(
+ trace_id=trace_id,
+ name=name,
+ input=input,
+ parent_id=parent_id,
+ data=data,
+ )
+
+ async def end_span(
+ self,
+ trace_id: str,
+ span: Span,
+ start_to_close_timeout: timedelta = timedelta(seconds=5),
+ heartbeat_timeout: timedelta = timedelta(seconds=1),
+ retry_policy: RetryPolicy = DEFAULT_RETRY_POLICY,
+ ) -> Span:
+ """
+ End an existing span in the trace.
+
+ Args:
+ trace_id (str): The trace ID for the span.
+ span (Span): The span to end.
+ start_to_close_timeout (timedelta): The start to close timeout for the span.
+ heartbeat_timeout (timedelta): The heartbeat timeout for the span.
+ retry_policy (RetryPolicy): The retry policy for the span.
+
+ Returns:
+ Span: The ended span object.
+ """
+ params = EndSpanParams(
+ trace_id=trace_id,
+ span=span,
+ )
+ if in_temporal_workflow():
+ return await ActivityHelpers.execute_activity(
+ activity_name=TracingActivityName.END_SPAN,
+ request=params,
+ response_type=Span,
+ start_to_close_timeout=start_to_close_timeout,
+ retry_policy=retry_policy,
+ heartbeat_timeout=heartbeat_timeout,
+ )
+ else:
+ return await self._tracing_service.end_span(
+ trace_id=trace_id,
+ span=span,
+ )
diff --git a/src/agentex/lib/adk/providers/__init__.py b/src/agentex/lib/adk/providers/__init__.py
new file mode 100644
index 000000000..cf3696b82
--- /dev/null
+++ b/src/agentex/lib/adk/providers/__init__.py
@@ -0,0 +1,9 @@
+from agentex.lib.adk.providers._modules.litellm import LiteLLMModule
+from agentex.lib.adk.providers._modules.openai import OpenAIModule
+from agentex.lib.adk.providers._modules.sgp import SGPModule
+
+openai = OpenAIModule()
+litellm = LiteLLMModule()
+sgp = SGPModule()
+
+__all__ = ["openai", "litellm", "sgp"]
diff --git a/src/agentex/lib/adk/providers/_modules/__init__.py b/src/agentex/lib/adk/providers/_modules/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/agentex/lib/adk/providers/_modules/litellm.py b/src/agentex/lib/adk/providers/_modules/litellm.py
new file mode 100644
index 000000000..f638e308c
--- /dev/null
+++ b/src/agentex/lib/adk/providers/_modules/litellm.py
@@ -0,0 +1,232 @@
+from collections.abc import AsyncGenerator
+from datetime import timedelta
+
+from temporalio.common import RetryPolicy
+
+from agentex import AsyncAgentex
+from agentex.lib.core.adapters.llm.adapter_litellm import LiteLLMGateway
+from agentex.lib.core.adapters.streams.adapter_redis import RedisEventStreamRepository
+from agentex.lib.core.services.adk.providers.litellm import LiteLLMService
+from agentex.lib.core.services.adk.streaming import StreamingService
+from agentex.lib.core.temporal.activities.activity_helpers import ActivityHelpers
+from agentex.lib.core.temporal.activities.adk.providers.litellm_activities import (
+ ChatCompletionAutoSendParams,
+ ChatCompletionParams,
+ ChatCompletionStreamAutoSendParams,
+ LiteLLMActivityName,
+)
+from agentex.lib.core.tracing.tracer import AsyncTracer
+from agentex.lib.types.llm_messages import Completion, LLMConfig
+from agentex.types.task_message import TaskMessage
+from agentex.lib.utils.logging import make_logger
+from agentex.lib.utils.temporal import in_temporal_workflow
+
+logger = make_logger(__name__)
+
+# Default retry policy for all LiteLLM operations
+DEFAULT_RETRY_POLICY = RetryPolicy(maximum_attempts=1)
+
+
+class LiteLLMModule:
+ """
+ Module for managing LiteLLM agent operations in Agentex.
+ Provides high-level methods for chat completion, streaming, agentic streaming.
+ """
+
+ def __init__(
+ self,
+ litellm_service: LiteLLMService | None = None,
+ ):
+ if litellm_service is None:
+ # Create default service
+ agentex_client = AsyncAgentex()
+ stream_repository = RedisEventStreamRepository()
+ streaming_service = StreamingService(
+ agentex_client=agentex_client,
+ stream_repository=stream_repository,
+ )
+ litellm_gateway = LiteLLMGateway()
+ tracer = AsyncTracer(agentex_client)
+ self._litellm_service = LiteLLMService(
+ agentex_client=agentex_client,
+ llm_gateway=litellm_gateway,
+ streaming_service=streaming_service,
+ tracer=tracer,
+ )
+ else:
+ self._litellm_service = litellm_service
+
+ async def chat_completion(
+ self,
+ llm_config: LLMConfig,
+ trace_id: str | None = None,
+ parent_span_id: str | None = None,
+ start_to_close_timeout: timedelta = timedelta(seconds=120),
+ heartbeat_timeout: timedelta = timedelta(seconds=120),
+ retry_policy: RetryPolicy = DEFAULT_RETRY_POLICY,
+ ) -> Completion:
+ """
+ Perform a chat completion using LiteLLM.
+
+ Args:
+ llm_config (LLMConfig): The configuration for the LLM.
+ trace_id (Optional[str]): The trace ID for tracing.
+ parent_span_id (Optional[str]): The parent span ID for tracing.
+ start_to_close_timeout (timedelta): The start to close timeout.
+ heartbeat_timeout (timedelta): The heartbeat timeout.
+ retry_policy (RetryPolicy): The retry policy.
+
+ Returns:
+ Completion: An OpenAI compatible Completion object
+ """
+ if in_temporal_workflow():
+ params = ChatCompletionParams(
+ trace_id=trace_id, parent_span_id=parent_span_id, llm_config=llm_config
+ )
+ return await ActivityHelpers.execute_activity(
+ activity_name=LiteLLMActivityName.CHAT_COMPLETION,
+ request=params,
+ response_type=Completion,
+ start_to_close_timeout=start_to_close_timeout,
+ heartbeat_timeout=heartbeat_timeout,
+ retry_policy=retry_policy,
+ )
+ else:
+ return await self._litellm_service.chat_completion(
+ llm_config=llm_config,
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ )
+
+ async def chat_completion_auto_send(
+ self,
+ task_id: str,
+ llm_config: LLMConfig,
+ trace_id: str | None = None,
+ parent_span_id: str | None = None,
+ start_to_close_timeout: timedelta = timedelta(seconds=120),
+ heartbeat_timeout: timedelta = timedelta(seconds=120),
+ retry_policy: RetryPolicy = DEFAULT_RETRY_POLICY,
+ ) -> TaskMessage | None:
+ """
+ Chat completion with automatic TaskMessage creation.
+
+ Args:
+ task_id (str): The ID of the task.
+ llm_config (LLMConfig): The configuration for the LLM (must have stream=False).
+ trace_id (Optional[str]): The trace ID for tracing.
+ parent_span_id (Optional[str]): The parent span ID for tracing.
+ start_to_close_timeout (timedelta): The start to close timeout.
+ heartbeat_timeout (timedelta): The heartbeat timeout.
+ retry_policy (RetryPolicy): The retry policy.
+
+ Returns:
+ TaskMessage: The final TaskMessage
+ """
+ if in_temporal_workflow():
+            # Non-streaming auto-send: the activity runs the completion and creates the TaskMessage server-side
+ params = ChatCompletionAutoSendParams(
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ task_id=task_id,
+ llm_config=llm_config,
+ )
+ return await ActivityHelpers.execute_activity(
+ activity_name=LiteLLMActivityName.CHAT_COMPLETION_AUTO_SEND,
+ request=params,
+ response_type=TaskMessage,
+ start_to_close_timeout=start_to_close_timeout,
+ heartbeat_timeout=heartbeat_timeout,
+ retry_policy=retry_policy,
+ )
+ else:
+ return await self._litellm_service.chat_completion_auto_send(
+ task_id=task_id,
+ llm_config=llm_config,
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ )
+
+ async def chat_completion_stream(
+ self,
+ llm_config: LLMConfig,
+ trace_id: str | None = None,
+ parent_span_id: str | None = None,
+ ) -> AsyncGenerator[Completion, None]:
+ """
+ Stream chat completion chunks using LiteLLM.
+
+ DEFAULT: Returns raw streaming chunks for manual handling.
+
+ NOTE: This method does NOT work in Temporal workflows!
+ Temporal activities cannot return generators. Use chat_completion_stream_auto_send() instead.
+
+ Args:
+ llm_config (LLMConfig): The configuration for the LLM (must have stream=True).
+ trace_id (Optional[str]): The trace ID for tracing.
+ parent_span_id (Optional[str]): The parent span ID for tracing.
+                (Timeout and retry options are not accepted here: this method
+                cannot run inside a Temporal workflow, so activity timeouts and
+                retry policies do not apply.)
+
+ Returns:
+ AsyncGenerator[Completion, None]: Generator yielding completion chunks
+
+ Raises:
+ ValueError: If called from within a Temporal workflow
+ """
+ # Delegate to service - it handles temporal workflow checks
+ async for chunk in self._litellm_service.chat_completion_stream(
+ llm_config=llm_config,
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ ):
+ yield chunk
+
+ async def chat_completion_stream_auto_send(
+ self,
+ task_id: str,
+ llm_config: LLMConfig,
+ trace_id: str | None = None,
+ parent_span_id: str | None = None,
+ start_to_close_timeout: timedelta = timedelta(seconds=120),
+ heartbeat_timeout: timedelta = timedelta(seconds=120),
+ retry_policy: RetryPolicy = DEFAULT_RETRY_POLICY,
+ ) -> TaskMessage | None:
+ """
+ Stream chat completion with automatic TaskMessage creation and streaming.
+
+ Args:
+ task_id (str): The ID of the task to run the agent for.
+ llm_config (LLMConfig): The configuration for the LLM (must have stream=True).
+ trace_id (Optional[str]): The trace ID for tracing.
+ parent_span_id (Optional[str]): The parent span ID for tracing.
+ start_to_close_timeout (timedelta): The start to close timeout.
+ heartbeat_timeout (timedelta): The heartbeat timeout.
+ retry_policy (RetryPolicy): The retry policy.
+
+ Returns:
+ TaskMessage: The final TaskMessage after streaming is complete
+ """
+ if in_temporal_workflow():
+ params = ChatCompletionStreamAutoSendParams(
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ task_id=task_id,
+ llm_config=llm_config,
+ )
+ return await ActivityHelpers.execute_activity(
+ activity_name=LiteLLMActivityName.CHAT_COMPLETION_STREAM_AUTO_SEND,
+ request=params,
+ response_type=TaskMessage,
+ start_to_close_timeout=start_to_close_timeout,
+ heartbeat_timeout=heartbeat_timeout,
+ retry_policy=retry_policy,
+ )
+ else:
+ return await self._litellm_service.chat_completion_stream_auto_send(
+ task_id=task_id,
+ llm_config=llm_config,
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ )
diff --git a/src/agentex/lib/adk/providers/_modules/openai.py b/src/agentex/lib/adk/providers/_modules/openai.py
new file mode 100644
index 000000000..e9f2dd68e
--- /dev/null
+++ b/src/agentex/lib/adk/providers/_modules/openai.py
@@ -0,0 +1,416 @@
+from datetime import timedelta
+from typing import Any, Literal
+
+from agents import Agent, RunResult, RunResultStreaming
+from agents.agent import StopAtTools, ToolsToFinalOutputFunction
+from agents.agent_output import AgentOutputSchemaBase
+from agents.model_settings import ModelSettings
+from agents.tool import Tool
+from mcp import StdioServerParameters
+from temporalio.common import RetryPolicy
+
+from agentex import AsyncAgentex
+from agentex.lib.core.adapters.streams.adapter_redis import RedisEventStreamRepository
+from agentex.lib.core.services.adk.providers.openai import OpenAIService
+from agentex.lib.core.services.adk.streaming import StreamingService
+from agentex.lib.core.temporal.activities.activity_helpers import ActivityHelpers
+from agentex.lib.core.temporal.activities.adk.providers.openai_activities import (
+ OpenAIActivityName,
+ RunAgentAutoSendParams,
+ RunAgentParams,
+ RunAgentStreamedAutoSendParams,
+)
+from agentex.lib.core.tracing.tracer import AsyncTracer
+from agentex.lib.types.agent_results import (
+ SerializableRunResult,
+ SerializableRunResultStreaming,
+)
+from agentex.lib.utils.logging import make_logger
+from agentex.lib.utils.temporal import in_temporal_workflow
+
+logger = make_logger(__name__)
+
+# Default retry policy for all OpenAI operations
+DEFAULT_RETRY_POLICY = RetryPolicy(maximum_attempts=1)
+
+
+class OpenAIModule:
+ """
+ Module for managing OpenAI agent operations in Agentex.
+ Provides high-level methods for running agents with and without streaming.
+ """
+
+ def __init__(
+ self,
+ openai_service: OpenAIService | None = None,
+ ):
+ if openai_service is None:
+ # Create default service
+ agentex_client = AsyncAgentex()
+ stream_repository = RedisEventStreamRepository()
+ streaming_service = StreamingService(
+ agentex_client=agentex_client,
+ stream_repository=stream_repository,
+ )
+ tracer = AsyncTracer(agentex_client)
+ self._openai_service = OpenAIService(
+ agentex_client=agentex_client,
+ streaming_service=streaming_service,
+ tracer=tracer,
+ )
+ else:
+ self._openai_service = openai_service
+
+ async def run_agent(
+ self,
+ input_list: list[dict[str, Any]],
+ mcp_server_params: list[StdioServerParameters],
+ agent_name: str,
+ agent_instructions: str,
+ trace_id: str | None = None,
+ parent_span_id: str | None = None,
+ start_to_close_timeout: timedelta = timedelta(seconds=600),
+ heartbeat_timeout: timedelta = timedelta(seconds=600),
+ retry_policy: RetryPolicy = DEFAULT_RETRY_POLICY,
+ handoff_description: str | None = None,
+ handoffs: list[Agent] | None = None,
+ model: str | None = None,
+ model_settings: ModelSettings | None = None,
+ tools: list[Tool] | None = None,
+ output_type: type[Any] | AgentOutputSchemaBase | None = None,
+ tool_use_behavior: (
+ Literal["run_llm_again", "stop_on_first_tool"]
+ | StopAtTools
+ | ToolsToFinalOutputFunction
+ ) = "run_llm_again",
+ ) -> SerializableRunResult | RunResult:
+ """
+ Run an agent without streaming or TaskMessage creation.
+
+ DEFAULT: No TaskMessage creation, returns only the result.
+
+ Args:
+ input_list: List of input data for the agent.
+ mcp_server_params: MCP server parameters for the agent.
+ agent_name: The name of the agent to run.
+ agent_instructions: Instructions for the agent.
+ trace_id: Optional trace ID for tracing.
+ parent_span_id: Optional parent span for tracing.
+ start_to_close_timeout: Maximum time allowed for the operation.
+ heartbeat_timeout: Maximum time between heartbeats.
+ retry_policy: Policy for retrying failed operations.
+ handoff_description: Optional description of the handoff.
+ handoffs: Optional list of handoffs.
+ model: Optional model to use.
+ model_settings: Optional model settings.
+ tools: Optional list of tools.
+ output_type: Optional output type.
+ tool_use_behavior: Optional tool use behavior.
+
+ Returns:
+ Union[SerializableRunResult, RunResult]: SerializableRunResult when in Temporal, RunResult otherwise.
+ """
+ if in_temporal_workflow():
+ params = RunAgentParams(
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ input_list=input_list,
+ mcp_server_params=mcp_server_params,
+ agent_name=agent_name,
+ agent_instructions=agent_instructions,
+ handoff_description=handoff_description,
+ handoffs=handoffs,
+ model=model,
+ model_settings=model_settings,
+ tools=tools,
+ output_type=output_type,
+ tool_use_behavior=tool_use_behavior,
+ )
+ return await ActivityHelpers.execute_activity(
+ activity_name=OpenAIActivityName.RUN_AGENT,
+ request=params,
+ response_type=SerializableRunResult,
+ start_to_close_timeout=start_to_close_timeout,
+ heartbeat_timeout=heartbeat_timeout,
+ retry_policy=retry_policy,
+ )
+ else:
+ return await self._openai_service.run_agent(
+ input_list=input_list,
+ mcp_server_params=mcp_server_params,
+ agent_name=agent_name,
+ agent_instructions=agent_instructions,
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ handoff_description=handoff_description,
+ handoffs=handoffs,
+ model=model,
+ model_settings=model_settings,
+ tools=tools,
+ output_type=output_type,
+ tool_use_behavior=tool_use_behavior,
+ )
+
+ async def run_agent_auto_send(
+ self,
+ task_id: str,
+ input_list: list[dict[str, Any]],
+ mcp_server_params: list[StdioServerParameters],
+ agent_name: str,
+ agent_instructions: str,
+ trace_id: str | None = None,
+ parent_span_id: str | None = None,
+ start_to_close_timeout: timedelta = timedelta(seconds=600),
+ heartbeat_timeout: timedelta = timedelta(seconds=600),
+ retry_policy: RetryPolicy = DEFAULT_RETRY_POLICY,
+ handoff_description: str | None = None,
+ handoffs: list[Agent] | None = None,
+ model: str | None = None,
+ model_settings: ModelSettings | None = None,
+ tools: list[Tool] | None = None,
+ output_type: type[Any] | AgentOutputSchemaBase | None = None,
+ tool_use_behavior: (
+ Literal["run_llm_again", "stop_on_first_tool"]
+ | StopAtTools
+ | ToolsToFinalOutputFunction
+ ) = "run_llm_again",
+ ) -> SerializableRunResult | RunResult:
+ """
+ Run an agent with automatic TaskMessage creation.
+
+ Args:
+ task_id: The ID of the task to run the agent for.
+ input_list: List of input data for the agent.
+ mcp_server_params: MCP server parameters for the agent.
+ agent_name: The name of the agent to run.
+ agent_instructions: Instructions for the agent.
+ trace_id: Optional trace ID for tracing.
+ parent_span_id: Optional parent span for tracing.
+ start_to_close_timeout: Maximum time allowed for the operation.
+ heartbeat_timeout: Maximum time between heartbeats.
+ retry_policy: Policy for retrying failed operations.
+ handoff_description: Optional description of the handoff.
+ handoffs: Optional list of handoffs.
+ model: Optional model to use.
+ model_settings: Optional model settings.
+ tools: Optional list of tools.
+ output_type: Optional output type.
+ tool_use_behavior: Optional tool use behavior.
+
+ Returns:
+ Union[SerializableRunResult, RunResult]: SerializableRunResult when in Temporal, RunResult otherwise.
+ """
+ if in_temporal_workflow():
+ params = RunAgentAutoSendParams(
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ task_id=task_id,
+ input_list=input_list,
+ mcp_server_params=mcp_server_params,
+ agent_name=agent_name,
+ agent_instructions=agent_instructions,
+ handoff_description=handoff_description,
+ handoffs=handoffs,
+ model=model,
+ model_settings=model_settings,
+ tools=tools,
+ output_type=output_type,
+ tool_use_behavior=tool_use_behavior,
+ )
+ return await ActivityHelpers.execute_activity(
+ activity_name=OpenAIActivityName.RUN_AGENT_AUTO_SEND,
+ request=params,
+ response_type=SerializableRunResult,
+ start_to_close_timeout=start_to_close_timeout,
+ heartbeat_timeout=heartbeat_timeout,
+ retry_policy=retry_policy,
+ )
+ else:
+ return await self._openai_service.run_agent_auto_send(
+ task_id=task_id,
+ input_list=input_list,
+ mcp_server_params=mcp_server_params,
+ agent_name=agent_name,
+ agent_instructions=agent_instructions,
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ handoff_description=handoff_description,
+ handoffs=handoffs,
+ model=model,
+ model_settings=model_settings,
+ tools=tools,
+ output_type=output_type,
+ tool_use_behavior=tool_use_behavior,
+ )
+
+ async def run_agent_streamed(
+ self,
+ input_list: list[dict[str, Any]],
+ mcp_server_params: list[StdioServerParameters],
+ agent_name: str,
+ agent_instructions: str,
+ trace_id: str | None = None,
+ parent_span_id: str | None = None,
+ handoff_description: str | None = None,
+ handoffs: list[Agent] | None = None,
+ model: str | None = None,
+ model_settings: ModelSettings | None = None,
+ tools: list[Tool] | None = None,
+ output_type: type[Any] | AgentOutputSchemaBase | None = None,
+ tool_use_behavior: (
+ Literal["run_llm_again", "stop_on_first_tool"]
+ | StopAtTools
+ | ToolsToFinalOutputFunction
+ ) = "run_llm_again",
+ ) -> RunResultStreaming:
+ """
+ Run an agent with streaming enabled but no TaskMessage creation.
+
+ DEFAULT: No TaskMessage creation, returns only the result.
+
+ NOTE: This method does NOT work in Temporal workflows!
+ Use run_agent_streamed_auto_send() instead for Temporal workflows.
+
+ Args:
+ input_list: List of input data for the agent.
+ mcp_server_params: MCP server parameters for the agent.
+ agent_name: The name of the agent to run.
+ agent_instructions: Instructions for the agent.
+ trace_id: Optional trace ID for tracing.
+ parent_span_id: Optional parent span for tracing.
+                (Timeout and retry options are not accepted here: this method
+                raises ValueError inside a Temporal workflow, so activity
+                timeouts and retry policies do not apply.)
+ handoff_description: Optional description of the handoff.
+ handoffs: Optional list of handoffs.
+ model: Optional model to use.
+ model_settings: Optional model settings.
+ tools: Optional list of tools.
+ output_type: Optional output type.
+ tool_use_behavior: Optional tool use behavior.
+
+ Returns:
+ RunResultStreaming: The result of the agent run with streaming.
+
+ Raises:
+ ValueError: If called from within a Temporal workflow
+ """
+ # Temporal workflows should use the auto_send variant
+ if in_temporal_workflow():
+ raise ValueError(
+ "run_agent_streamed() cannot be used in Temporal workflows. "
+ "Use run_agent_streamed_auto_send() instead, which properly handles "
+ "TaskMessage creation and streaming through the streaming service."
+ )
+
+ return await self._openai_service.run_agent_streamed(
+ input_list=input_list,
+ mcp_server_params=mcp_server_params,
+ agent_name=agent_name,
+ agent_instructions=agent_instructions,
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ handoff_description=handoff_description,
+ handoffs=handoffs,
+ model=model,
+ model_settings=model_settings,
+ tools=tools,
+ output_type=output_type,
+ tool_use_behavior=tool_use_behavior,
+ )
+
+ async def run_agent_streamed_auto_send(
+ self,
+ task_id: str,
+ input_list: list[dict[str, Any]],
+ mcp_server_params: list[StdioServerParameters],
+ agent_name: str,
+ agent_instructions: str,
+ trace_id: str | None = None,
+ parent_span_id: str | None = None,
+ start_to_close_timeout: timedelta = timedelta(seconds=600),
+ heartbeat_timeout: timedelta = timedelta(seconds=600),
+ retry_policy: RetryPolicy = DEFAULT_RETRY_POLICY,
+ handoff_description: str | None = None,
+ handoffs: list[Agent] | None = None,
+ model: str | None = None,
+ model_settings: ModelSettings | None = None,
+ tools: list[Tool] | None = None,
+ output_type: type[Any] | AgentOutputSchemaBase | None = None,
+ tool_use_behavior: (
+ Literal["run_llm_again", "stop_on_first_tool"]
+ | StopAtTools
+ | ToolsToFinalOutputFunction
+ ) = "run_llm_again",
+ mcp_timeout_seconds: int | None = None,
+ ) -> SerializableRunResultStreaming | RunResultStreaming:
+ """
+ Run an agent with streaming enabled and automatic TaskMessage creation.
+
+ Args:
+ task_id: The ID of the task to run the agent for.
+ input_list: List of input data for the agent.
+ mcp_server_params: MCP server parameters for the agent.
+ agent_name: The name of the agent to run.
+ agent_instructions: Instructions for the agent.
+ trace_id: Optional trace ID for tracing.
+ parent_span_id: Optional parent span for tracing.
+ start_to_close_timeout: Maximum time allowed for the operation.
+ heartbeat_timeout: Maximum time between heartbeats.
+ retry_policy: Policy for retrying failed operations.
+ handoff_description: Optional description of the handoff.
+ handoffs: Optional list of handoffs.
+ model: Optional model to use.
+ model_settings: Optional model settings.
+ tools: Optional list of tools.
+ output_type: Optional output type.
+ tool_use_behavior: Optional tool use behavior.
+ mcp_timeout_seconds: Optional param to set the timeout threshold for the MCP servers. Defaults to 5 seconds.
+
+ Returns:
+ Union[SerializableRunResultStreaming, RunResultStreaming]: SerializableRunResultStreaming when in Temporal, RunResultStreaming otherwise.
+ """
+ if in_temporal_workflow():
+ params = RunAgentStreamedAutoSendParams(
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ task_id=task_id,
+ input_list=input_list,
+ mcp_server_params=mcp_server_params,
+ agent_name=agent_name,
+ agent_instructions=agent_instructions,
+ handoff_description=handoff_description,
+ handoffs=handoffs,
+ model=model,
+ model_settings=model_settings,
+ tools=tools,
+ output_type=output_type,
+ tool_use_behavior=tool_use_behavior,
+ mcp_timeout_seconds=mcp_timeout_seconds,
+ )
+ return await ActivityHelpers.execute_activity(
+ activity_name=OpenAIActivityName.RUN_AGENT_STREAMED_AUTO_SEND,
+ request=params,
+ response_type=SerializableRunResultStreaming,
+ start_to_close_timeout=start_to_close_timeout,
+ heartbeat_timeout=heartbeat_timeout,
+ retry_policy=retry_policy,
+ )
+ else:
+ return await self._openai_service.run_agent_streamed_auto_send(
+ task_id=task_id,
+ input_list=input_list,
+ mcp_server_params=mcp_server_params,
+ agent_name=agent_name,
+ agent_instructions=agent_instructions,
+ trace_id=trace_id,
+ parent_span_id=parent_span_id,
+ handoff_description=handoff_description,
+ handoffs=handoffs,
+ model=model,
+ model_settings=model_settings,
+ tools=tools,
+ output_type=output_type,
+ tool_use_behavior=tool_use_behavior,
+ mcp_timeout_seconds=mcp_timeout_seconds,
+ )
diff --git a/src/agentex/lib/adk/providers/_modules/sgp.py b/src/agentex/lib/adk/providers/_modules/sgp.py
new file mode 100644
index 000000000..66ccf37c4
--- /dev/null
+++ b/src/agentex/lib/adk/providers/_modules/sgp.py
@@ -0,0 +1,85 @@
+from datetime import timedelta
+
+from scale_gp import SGPClient, SGPClientError
+from temporalio.common import RetryPolicy
+
+from agentex import AsyncAgentex
+from agentex.lib.core.services.adk.providers.sgp import SGPService
+from agentex.lib.core.temporal.activities.activity_helpers import ActivityHelpers
+from agentex.lib.core.temporal.activities.adk.providers.sgp_activities import (
+ DownloadFileParams,
+ FileContentResponse,
+ SGPActivityName,
+)
+from agentex.lib.core.tracing.tracer import AsyncTracer
+from agentex.lib.utils.logging import make_logger
+from agentex.lib.utils.temporal import in_temporal_workflow
+
+logger = make_logger(__name__)
+
+DEFAULT_RETRY_POLICY = RetryPolicy(maximum_attempts=1)
+
+
+class SGPModule:
+ """
+ Module for managing SGP agent operations in Agentex.
+ Provides high-level methods for chat completion, streaming, agentic streaming, and message classification.
+ """
+
+ def __init__(
+ self,
+ sgp_service: SGPService | None = None,
+ ):
+ if sgp_service is None:
+ try:
+ sgp_client = SGPClient()
+ agentex_client = AsyncAgentex()
+ tracer = AsyncTracer(agentex_client)
+ self._sgp_service = SGPService(sgp_client=sgp_client, tracer=tracer)
+ except SGPClientError:
+ self._sgp_service = None
+ else:
+ self._sgp_service = sgp_service
+
+ async def download_file_content(
+ self,
+ params: DownloadFileParams,
+ start_to_close_timeout: timedelta = timedelta(seconds=30),
+ heartbeat_timeout: timedelta = timedelta(seconds=30),
+ retry_policy: RetryPolicy = DEFAULT_RETRY_POLICY,
+ ) -> FileContentResponse:
+ """
+ Download the content of a file from SGP.
+
+ Args:
+ params (DownloadFileParams): The parameters for the download file content activity.
+ start_to_close_timeout (timedelta): The start to close timeout.
+ heartbeat_timeout (timedelta): The heartbeat timeout.
+ retry_policy (RetryPolicy): The retry policy.
+
+ Returns:
+ FileContentResponse: The content of the file
+ """
+ if self._sgp_service is None:
+ raise ValueError(
+ "SGP activities are disabled because the SGP client could not be initialized. Please check that the SGP_API_KEY environment variable is set."
+ )
+
+ params = DownloadFileParams(
+ file_id=params.file_id,
+ filename=params.filename,
+ )
+ if in_temporal_workflow():
+ return await ActivityHelpers.execute_activity(
+ activity_name=SGPActivityName.DOWNLOAD_FILE_CONTENT,
+ request=params,
+ response_type=FileContentResponse,
+ start_to_close_timeout=start_to_close_timeout,
+ heartbeat_timeout=heartbeat_timeout,
+ retry_policy=retry_policy,
+ )
+ else:
+ return await self._sgp_service.download_file_content(
+ file_id=params.file_id,
+ filename=params.filename,
+ )
diff --git a/src/agentex/lib/adk/utils/__init__.py b/src/agentex/lib/adk/utils/__init__.py
new file mode 100644
index 000000000..c190cb6e7
--- /dev/null
+++ b/src/agentex/lib/adk/utils/__init__.py
@@ -0,0 +1,5 @@
from agentex.lib.adk.utils._modules.templating import TemplatingModule

# Shared, ready-to-use templating entry point for this package.
templating = TemplatingModule()

__all__ = ["templating"]
diff --git a/src/agentex/lib/adk/utils/_modules/__init__.py b/src/agentex/lib/adk/utils/_modules/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/agentex/lib/adk/utils/_modules/templating.py b/src/agentex/lib/adk/utils/_modules/templating.py
new file mode 100644
index 000000000..8b2108c99
--- /dev/null
+++ b/src/agentex/lib/adk/utils/_modules/templating.py
@@ -0,0 +1,94 @@
+from datetime import timedelta
+from typing import Any
+
+from temporalio.common import RetryPolicy
+
+from agentex import AsyncAgentex
+from agentex.lib.core.services.adk.utils.templating import TemplatingService
+from agentex.lib.core.temporal.activities.activity_helpers import ActivityHelpers
+from agentex.lib.core.temporal.activities.adk.utils.templating_activities import (
+ JinjaActivityName,
+ RenderJinjaParams,
+)
+from agentex.lib.core.tracing.tracer import AsyncTracer
+from agentex.lib.utils.logging import make_logger
+from agentex.lib.utils.temporal import in_temporal_workflow
+
logger = make_logger(__name__)

# Default to a single attempt; callers can pass their own RetryPolicy per call.
DEFAULT_RETRY_POLICY = RetryPolicy(maximum_attempts=1)
+
+
class TemplatingModule:
    """
    Module for managing templating operations in Agentex.

    Exposes high-level template rendering that hides whether the work runs
    directly (via the templating service) or as a Temporal activity when
    called from inside a workflow.
    """

    def __init__(
        self,
        templating_service: TemplatingService | None = None,
    ):
        """
        Initialize the templating interface.

        Args:
            templating_service (Optional[TemplatingService]): Optional pre-configured templating service. If None, will be auto-initialized.
        """
        if templating_service is not None:
            self._templating_service = templating_service
        else:
            agentex_client = AsyncAgentex()
            self._templating_service = TemplatingService(tracer=AsyncTracer(agentex_client))

    async def render_jinja(
        self,
        trace_id: str,
        template: str,
        variables: dict[str, Any],
        parent_span_id: str | None = None,
        start_to_close_timeout: timedelta = timedelta(seconds=10),
        heartbeat_timeout: timedelta = timedelta(seconds=10),
        retry_policy: RetryPolicy = DEFAULT_RETRY_POLICY,
    ) -> str:
        """
        Render a Jinja template.

        Args:
            trace_id (str): Unique identifier for tracing and correlation.
            template (str): The Jinja template string to render.
            variables (Dict[str, Any]): Variables to use in the template.
            parent_span_id (Optional[str]): Optional parent span for tracing.
            start_to_close_timeout (timedelta): Maximum time allowed for the operation.
            heartbeat_timeout (timedelta): Maximum time between heartbeats.
            retry_policy (RetryPolicy): Policy for retrying failed operations.

        Returns:
            str: The rendered template as a string.
        """
        request = RenderJinjaParams(
            trace_id=trace_id,
            parent_span_id=parent_span_id,
            template=template,
            variables=variables,
        )
        if not in_temporal_workflow():
            # Direct execution path outside of Temporal.
            return await self._templating_service.render_jinja(
                template=template,
                variables=variables,
                trace_id=trace_id,
                parent_span_id=parent_span_id,
            )
        # Workflow context: run the rendering as a Temporal activity.
        return await ActivityHelpers.execute_activity(
            activity_name=JinjaActivityName.RENDER_JINJA,
            request=request,
            response_type=str,
            start_to_close_timeout=start_to_close_timeout,
            heartbeat_timeout=heartbeat_timeout,
            retry_policy=retry_policy,
        )
diff --git a/src/agentex/lib/cli/__init__.py b/src/agentex/lib/cli/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/agentex/lib/cli/commands/__init__.py b/src/agentex/lib/cli/commands/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/agentex/lib/cli/commands/agents.py b/src/agentex/lib/cli/commands/agents.py
new file mode 100644
index 000000000..c3e5899e5
--- /dev/null
+++ b/src/agentex/lib/cli/commands/agents.py
@@ -0,0 +1,258 @@
+import builtins
+from pathlib import Path
+
+import questionary
+import typer
+from rich import print_json
+from rich.console import Console
+from rich.panel import Panel
+
+from agentex.lib.cli.handlers.agent_handlers import (
+ build_agent,
+ run_agent,
+)
+from agentex.lib.cli.handlers.deploy_handlers import (
+ DeploymentError,
+ HelmError,
+ InputDeployOverrides,
+ deploy_agent,
+)
+from agentex.lib.cli.utils.cli_utils import handle_questionary_cancellation
+from agentex.lib.cli.utils.kubectl_utils import (
+ check_and_switch_cluster_context,
+ validate_namespace,
+)
+from agentex import Agentex
+from agentex.lib.sdk.config.agent_manifest import AgentManifest
+from agentex.lib.utils.logging import make_logger
+
logger = make_logger(__name__)
console = Console()

# Typer sub-application holding all `agentex agents ...` commands.
agents = typer.Typer()
+
+
@agents.command()
def get(
    agent_id: str = typer.Argument(..., help="ID of the agent to get"),
):
    """
    Get the agent with the given ID.
    """
    # Docstring corrected: the command looks up by agent ID, not by name.
    logger.info(f"Getting agent with ID: {agent_id}")
    client = Agentex()
    agent = client.agents.retrieve(agent_id=agent_id)
    logger.info(f"Agent retrieved: {agent}")
    print_json(data=agent.to_dict())
+
+
@agents.command()
def list():
    """
    List all agents.
    """
    logger.info("Listing all agents")
    # Avoid reusing the module-level `agents` Typer app name for the result.
    retrieved = Agentex().agents.list()
    logger.info(f"Agents retrieved: {retrieved}")
    print_json(data=[item.to_dict() for item in retrieved])
+
+
@agents.command()
def delete(
    agent_name: str = typer.Argument(..., help="Name of the agent to delete"),
):
    """
    Delete the agent with the given name.
    """
    logger.info(f"Deleting agent with name: {agent_name}")
    Agentex().agents.delete_by_name(agent_name=agent_name)
    logger.info(f"Agent deleted: {agent_name}")
+
+
@agents.command()
def build(
    manifest: str = typer.Option(..., help="Path to the manifest you want to use"),
    registry: str | None = typer.Option(
        None, help="Registry URL for pushing the built image"
    ),
    repository_name: str | None = typer.Option(
        None, help="Repository name to use for the built image"
    ),
    push: bool = typer.Option(False, help="Whether to push the image to the registry"),
    secret: str | None = typer.Option(
        None,
        help="Docker build secret in the format 'id=secret-id,src=path-to-secret-file'",
    ),
    tag: str | None = typer.Option(
        None, help="Image tag to use (defaults to 'latest')"
    ),
    build_arg: builtins.list[str] | None = typer.Option(  # noqa: B008
        None,
        help="Docker build argument in the format 'KEY=VALUE' (can be used multiple times)",
    ),
):
    """
    Build an agent image locally from the given manifest.
    """
    typer.echo(f"Building agent image from manifest: {manifest}")

    try:
        # Delegate the heavy lifting to the build handler.
        image_url = build_agent(
            manifest_path=manifest,
            registry_url=registry,
            repository_name=repository_name,
            push=push,
            secret=secret,
            tag=tag,
            build_args=build_arg,
        )
        # build_agent returns a falsy value when no registry was supplied.
        if not image_url:
            typer.echo("No registry provided, image was not built")
        else:
            typer.echo(f"Successfully built image: {image_url}")
    except Exception as exc:
        typer.echo(f"Error building agent image: {str(exc)}", err=True)
        logger.exception("Error building agent image")
        raise typer.Exit(1) from exc
+
+
@agents.command()
def run(
    manifest: str = typer.Option(..., help="Path to the manifest you want to use"),
):
    """
    Run an agent locally from the given manifest.
    """
    typer.echo(f"Running agent from manifest: {manifest}")
    try:
        run_agent(manifest_path=manifest)
    except Exception as exc:
        typer.echo(f"Error running agent: {str(exc)}", err=True)
        logger.exception("Error running agent")
        raise typer.Exit(1) from exc
+
+
@agents.command()
def deploy(
    cluster: str = typer.Option(
        ..., help="Target cluster name (must match kubectl context)"
    ),
    manifest: str = typer.Option("manifest.yaml", help="Path to the manifest file"),
    namespace: str | None = typer.Option(
        None,
        help="Kubernetes namespace to deploy to (required in non-interactive mode)",
    ),
    tag: str | None = typer.Option(None, help="Override the image tag for deployment"),
    repository: str | None = typer.Option(
        None, help="Override the repository for deployment"
    ),
    override_file: str | None = typer.Option(
        None, help="Path to override configuration file"
    ),
    interactive: bool = typer.Option(
        True, "--interactive/--no-interactive", help="Enable interactive prompts"
    ),
):
    """Deploy an agent to a Kubernetes cluster using Helm"""

    console.print(
        Panel.fit("🚀 [bold blue]Deploy Agent[/bold blue]", border_style="blue")
    )

    try:
        # Validate manifest exists
        manifest_path = Path(manifest)
        if not manifest_path.exists():
            console.print(f"[red]Error:[/red] Manifest file not found: {manifest}")
            raise typer.Exit(1)

        # In non-interactive mode, require namespace
        if not interactive and not namespace:
            console.print(
                "[red]Error:[/red] --namespace is required in non-interactive mode"
            )
            raise typer.Exit(1)

        # Get namespace if not provided (only in interactive mode)
        if not namespace:
            namespace = questionary.text(
                "Enter Kubernetes namespace:", default="default"
            ).ask()
            namespace = handle_questionary_cancellation(namespace, "namespace input")

            if not namespace:
                console.print("Deployment cancelled")
                raise typer.Exit(0)

        # Validate override file exists if provided
        if override_file:
            override_path = Path(override_file)
            if not override_path.exists():
                console.print(
                    f"[red]Error:[/red] Override file not found: {override_file}"
                )
                raise typer.Exit(1)

        # Load manifest for credential validation
        manifest_obj = AgentManifest.from_yaml(str(manifest_path))

        # Confirm deployment (only in interactive mode)
        console.print("\n[bold]Deployment Summary:[/bold]")
        console.print(f" Manifest: {manifest}")
        console.print(f" Cluster: {cluster}")
        console.print(f" Namespace: {namespace}")
        if tag:
            console.print(f" Image Tag: {tag}")
        if override_file:
            console.print(f" Override File: {override_file}")

        if interactive:
            proceed = questionary.confirm("Proceed with deployment?").ask()
            proceed = handle_questionary_cancellation(
                proceed, "deployment confirmation"
            )

            if not proceed:
                console.print("Deployment cancelled")
                raise typer.Exit(0)
        else:
            console.print("Proceeding with deployment (non-interactive mode)")

        check_and_switch_cluster_context(cluster)
        if not validate_namespace(namespace, cluster):
            console.print(
                f"[red]Error:[/red] Namespace '{namespace}' does not exist in cluster '{cluster}'"
            )
            raise typer.Exit(1)

        deploy_overrides = InputDeployOverrides(repository=repository, image_tag=tag)

        # Deploy agent
        deploy_agent(
            manifest_path=str(manifest_path),
            cluster_name=cluster,
            namespace=namespace,
            deploy_overrides=deploy_overrides,
            override_file_path=override_file,
        )

        # Use the already loaded manifest object
        release_name = f"{manifest_obj.agent.name}-{cluster}"

        console.print(
            "\n[bold green]🎉 Deployment completed successfully![/bold green]"
        )
        console.print("\nTo check deployment status:")
        console.print(f" kubectl get pods -n {namespace}")
        console.print(f" helm status {release_name} -n {namespace}")

    except typer.Exit:
        # typer.Exit derives from Exception (via click's Exit/RuntimeError), so
        # without this clause the intentional exits above would be caught by the
        # blanket handler below and reported as "Unexpected error" with code 1.
        raise
    except (DeploymentError, HelmError) as e:
        console.print(f"[red]Deployment failed:[/red] {str(e)}")
        logger.exception("Deployment failed")
        raise typer.Exit(1) from e
    except Exception as e:
        console.print(f"[red]Unexpected error:[/red] {str(e)}")
        logger.exception("Unexpected error during deployment")
        raise typer.Exit(1) from e
diff --git a/src/agentex/lib/cli/commands/init.py b/src/agentex/lib/cli/commands/init.py
new file mode 100644
index 000000000..bf8ec9d2c
--- /dev/null
+++ b/src/agentex/lib/cli/commands/init.py
@@ -0,0 +1,241 @@
+from enum import Enum
+from pathlib import Path
+
+import questionary
+from jinja2 import Environment, FileSystemLoader
+from rich.console import Console
+from rich.panel import Panel
+from rich.table import Table
+
+from agentex.lib.utils.logging import make_logger
+
logger = make_logger(__name__)
console = Console()

# Get the templates directory relative to this file; subdirectories are named
# after the TemplateType values defined below.
TEMPLATES_DIR = Path(__file__).parent.parent / "templates"
+
+
class TemplateType(str, Enum):
    """Available project template flavors.

    Each value doubles as the template subdirectory name under TEMPLATES_DIR.
    """

    TEMPORAL = "temporal"
    DEFAULT = "default"
    SYNC = "sync"
+
+
def render_template(
    template_path: str, context: dict, template_type: TemplateType
) -> str:
    """Render a template with the given context"""
    # Each template flavor lives in its own subdirectory of TEMPLATES_DIR.
    loader = FileSystemLoader(TEMPLATES_DIR / template_type.value)
    environment = Environment(loader=loader)
    return environment.get_template(template_path).render(**context)
+
+
def create_project_structure(
    path: Path, context: dict, template_type: TemplateType, use_uv: bool
):
    """Create the project structure from templates.

    Args:
        path: Parent directory to create the project in.
        context: Rendering context; must include "project_name".
        template_type: Which template flavor to render.
        use_uv: Emit uv packaging files (pyproject + uv Dockerfile) instead of
            requirements.txt + plain Dockerfile.
    """
    # Create project directory
    project_dir: Path = path / context["project_name"]
    project_dir.mkdir(parents=True, exist_ok=True)

    # Create project/code directory
    code_dir: Path = project_dir / "project"
    code_dir.mkdir(parents=True, exist_ok=True)

    # Create __init__.py
    (code_dir / "__init__.py").touch()

    # Temporal agents also need a workflow and a worker entrypoint.
    files_by_template = {
        TemplateType.TEMPORAL: ["acp.py", "workflow.py", "run_worker.py"],
        TemplateType.DEFAULT: ["acp.py"],
        TemplateType.SYNC: ["acp.py"],
    }

    # Render project/code files
    for filename in files_by_template[template_type]:
        rendered = render_template(f"project/{filename}.j2", context, template_type)
        (code_dir / filename).write_text(rendered)

    # Root files common to every template flavor.
    root_templates = {
        ".dockerignore.j2": ".dockerignore",
        "manifest.yaml.j2": "manifest.yaml",
        "README.md.j2": "README.md",
    }

    # Add package management file based on uv choice
    if use_uv:
        root_templates.update(
            {"pyproject.toml.j2": "pyproject.toml", "Dockerfile-uv.j2": "Dockerfile"}
        )
    else:
        root_templates.update(
            {"requirements.txt.j2": "requirements.txt", "Dockerfile.j2": "Dockerfile"}
        )

    for template_name, output_name in root_templates.items():
        rendered = render_template(template_name, context, template_type)
        (project_dir / output_name).write_text(rendered)

    console.print(f"\n[green]✓[/green] Created project structure at: {project_dir}")
+
+
def get_project_context(answers: dict, project_path: Path, manifest_root: Path) -> dict:
    """Build the template-rendering context from user answers.

    Args:
        answers: Collected prompt answers; must contain "agent_directory_name"
            and "agent_name".
        project_path: Directory the project will be created in.
        manifest_root: Accepted for interface compatibility; not read here.

    Returns:
        dict: ``answers`` augmented with derived naming and path entries.
    """
    # Use agent_directory_name as project_name
    project_name = answers["agent_directory_name"].replace("-", "_")

    # The build root is one level up, so the path from it equals the project name.
    project_path_from_build_root = project_name

    # Calculate the agentex-sdk path relative to the project
    sdk_path = Path(__file__).parent.parent.parent.parent.resolve()
    try:
        sdk_path_str = str(sdk_path.relative_to(project_path.resolve()))
    except ValueError:
        # If agentex-sdk is not under the project path, use absolute path
        sdk_path_str = str(sdk_path)
        logger.warning(
            f"agentex-sdk path {sdk_path} is not under project path {project_path}. "
            f"Using absolute path for editable install."
        )

    agent_name = answers["agent_name"]
    workflow_class = (
        "".join(part.capitalize() for part in agent_name.split("-")) + "Workflow"
    )

    return {
        **answers,
        "project_name": project_name,
        "workflow_class": workflow_class,
        "workflow_name": agent_name,
        "queue_name": project_name + "_queue",
        "project_path_from_build_root": project_path_from_build_root,
        "agentex_py_path": sdk_path_str,
    }
+
+
def init():
    """Initialize a new agent project interactively.

    Prompts for template type, location, names, description and packaging
    choice, renders the chosen template, and prints next steps. Returns early
    (no scaffolding) if any prompt is cancelled.
    """
    console.print(
        Panel.fit(
            "🤖 [bold blue]Initialize New Agent Project[/bold blue]",
            border_style="blue",
        )
    )

    # Use a Rich table for template descriptions
    table = Table(show_header=True, header_style="bold magenta")
    table.add_column("Template", style="cyan", no_wrap=True)
    table.add_column("Description", style="white")
    table.add_row(
        "[bold cyan]Agentic - ACP Only[/bold cyan]",
        "A simple synchronous agent that handles tasks directly. Best for straightforward agents that don't need long-running operations.",
    )
    table.add_row(
        "[bold cyan]Agentic - Temporal[/bold cyan]",
        "An asynchronous agent powered by Temporal workflows. Best for agents that need to handle long-running tasks, retries, or complex state management.",
    )
    table.add_row(
        "[bold cyan]Sync ACP[/bold cyan]",
        # Typo fix: "input.Best" -> "input. Best"
        "A synchronous agent that handles tasks directly. The difference is that this Sync ACP will be required to respond with the results in the same call as the input. Best for straightforward agents that don't need long-running operations.",
    )
    console.print()
    console.print(table)
    console.print()

    def validate_agent_name(text: str) -> bool | str:
        """Validate agent name follows required format"""
        is_valid = len(text) >= 1 and text.replace("-", "").isalnum() and text.islower()
        if not is_valid:
            return "Invalid name. Use only lowercase letters, numbers, and hyphens. Examples: 'my-agent', 'newsbot'"
        return True

    # Gather project information
    template_type = questionary.select(
        "What type of template would you like to create?",
        choices=[
            {"name": "Agentic - ACP Only", "value": TemplateType.DEFAULT},
            {"name": "Agentic - Temporal", "value": TemplateType.TEMPORAL},
            {"name": "Sync ACP", "value": TemplateType.SYNC},
        ],
    ).ask()
    if not template_type:
        return

    project_path = questionary.path(
        "Where would you like to create your project?", default="."
    ).ask()
    if not project_path:
        return

    agent_name = questionary.text(
        "What's your agent name? (letters, numbers, and hyphens only)",
        validate=validate_agent_name,
    ).ask()
    if not agent_name:
        return

    agent_directory_name = questionary.text(
        "What do you want to name the project folder for your agent?",
        default=agent_name,
    ).ask()
    if not agent_directory_name:
        return

    description = questionary.text(
        "Provide a brief description of your agent:", default="An AgentEx agent"
    ).ask()
    if not description:
        return

    use_uv = questionary.select(
        "Would you like to use uv for package management?",
        choices=[
            {"name": "Yes (Recommended)", "value": True},
            {"name": "No", "value": False},
        ],
    ).ask()
    # .ask() returns None on cancellation. Check identity against None (not
    # truthiness) because "No" is a valid False answer; previously a cancelled
    # prompt fell through with use_uv=None.
    if use_uv is None:
        return

    answers = {
        "template_type": template_type,
        "project_path": project_path,
        "agent_name": agent_name,
        "agent_directory_name": agent_directory_name,
        "description": description,
        "use_uv": use_uv,
    }

    # Derive all names from agent_directory_name and path
    project_path = Path(answers["project_path"]).resolve()
    manifest_root = Path("../../")

    # Get project context
    context = get_project_context(answers, project_path, manifest_root)
    context["template_type"] = answers["template_type"].value
    context["use_uv"] = answers["use_uv"]

    # Create project structure
    create_project_structure(
        project_path, context, answers["template_type"], answers["use_uv"]
    )

    # Show next steps
    console.print("\n[bold green]✨ Project created successfully![/bold green]")
    console.print("\n[bold]Next steps:[/bold]")
    console.print(f"1. cd {project_path}/{context['project_name']}")
    console.print("2. Review and customize the generated files")
    console.print("3. Update the container registry in manifest.yaml")

    # Both template families are run the same way locally, so no branch is needed
    # (the previous if/else printed identical text for both cases).
    console.print("4. Run locally:")
    console.print(" agentex agents run --manifest manifest.yaml")

    console.print("5. Deploy your agent:")
    console.print(
        " agentex agents deploy --cluster your-cluster --namespace your-namespace"
    )
diff --git a/src/agentex/lib/cli/commands/main.py b/src/agentex/lib/cli/commands/main.py
new file mode 100644
index 000000000..608634304
--- /dev/null
+++ b/src/agentex/lib/cli/commands/main.py
@@ -0,0 +1,33 @@
+import typer
+
+from agentex.lib.cli.commands.agents import agents
+from agentex.lib.cli.commands.init import init
+from agentex.lib.cli.commands.secrets import secrets
+from agentex.lib.cli.commands.tasks import tasks
+from agentex.lib.cli.commands.uv import uv
+
+# Create the main Typer application
# Create the main Typer application
app = typer.Typer(
    context_settings={"help_option_names": ["-h", "--help"], "max_content_width": 800},
    pretty_exceptions_show_locals=False,
    pretty_exceptions_enable=False,
    add_completion=False,
)

# Add the subcommands
app.add_typer(agents, name="agents", help="Get, list, run, build, and deploy agents")
app.add_typer(tasks, name="tasks", help="Get, list, and delete tasks")
app.add_typer(secrets, name="secrets", help="Sync, get, list, and delete secrets")
app.add_typer(
    uv, name="uv", help="Wrapper for uv command with AgentEx-specific enhancements"
)

# Add init command with documentation. `init` is fully interactive and takes no
# CLI arguments, so the example is a bare invocation (the previous epilog showed
# non-existent --template/name arguments).
app.command(
    help="Initialize a new agent project with a template",
    epilog="Example: agentex init",
)(init)


if __name__ == "__main__":
    app()
diff --git a/src/agentex/lib/cli/commands/secrets.py b/src/agentex/lib/cli/commands/secrets.py
new file mode 100644
index 000000000..2aa671b4d
--- /dev/null
+++ b/src/agentex/lib/cli/commands/secrets.py
@@ -0,0 +1,169 @@
+from pathlib import Path
+
+import questionary
+import typer
+from rich import print_json
+from rich.console import Console
+from rich.panel import Panel
+
+from agentex.lib.cli.handlers.secret_handlers import (
+ delete_secret,
+ get_kubernetes_secrets_by_type,
+ get_secret,
+ sync_secrets,
+)
+from agentex.lib.cli.utils.cli_utils import handle_questionary_cancellation
+from agentex.lib.cli.utils.kubectl_utils import (
+ check_and_switch_cluster_context,
+ validate_namespace,
+)
+from agentex.lib.sdk.config.agent_manifest import AgentManifest
+from agentex.lib.utils.logging import make_logger
+
logger = make_logger(__name__)
console = Console()

# Typer sub-application holding all `agentex secrets ...` commands.
secrets = typer.Typer()
+
+
@secrets.command()
def list(
    namespace: str = typer.Option(
        "agentex-agents", help="Kubernetes namespace to list secrets from"
    ),
    cluster: str | None = typer.Option(
        None, help="Cluster context to use (defaults to current context)"
    ),
):
    """List names of available secrets"""
    logger.info(f"Listing secrets in namespace: {namespace}")

    # Only switch/validate context when a cluster was explicitly requested.
    if cluster:
        check_and_switch_cluster_context(cluster)
        if not validate_namespace(namespace, cluster):
            console.print(
                f"[red]Error:[/red] Namespace '{namespace}' does not exist in cluster '{cluster}'"
            )
            raise typer.Exit(1)

    names_by_type = get_kubernetes_secrets_by_type(namespace=namespace, context=cluster)
    print_json(data=names_by_type)
+ print_json(data=secrets_list)
+
+
@secrets.command()
def get(
    name: str = typer.Argument(..., help="Name of the secret to get"),
    namespace: str = typer.Option(
        "agentex-agents", help="Kubernetes namespace for the secret"
    ),
    cluster: str | None = typer.Option(
        None, help="Cluster context to use (defaults to current context)"
    ),
):
    """Get details about a secret"""
    logger.info(f"Getting secret: {name} from namespace: {namespace}")

    # Only switch/validate context when a cluster was explicitly requested.
    if cluster:
        check_and_switch_cluster_context(cluster)
        if not validate_namespace(namespace, cluster):
            console.print(
                f"[red]Error:[/red] Namespace '{namespace}' does not exist in cluster '{cluster}'"
            )
            raise typer.Exit(1)

    print_json(data=get_secret(name=name, namespace=namespace, context=cluster))
+
+
@secrets.command()
def delete(
    name: str = typer.Argument(..., help="Name of the secret to delete"),
    namespace: str = typer.Option(
        "agentex-agents", help="Kubernetes namespace for the secret"
    ),
    cluster: str | None = typer.Option(
        None, help="Cluster context to use (defaults to current context)"
    ),
):
    """Delete a secret"""
    logger.info(f"Deleting secret: {name} from namespace: {namespace}")

    # Only switch/validate context when a cluster was explicitly requested.
    if cluster:
        check_and_switch_cluster_context(cluster)
        if not validate_namespace(namespace, cluster):
            console.print(
                f"[red]Error:[/red] Namespace '{namespace}' does not exist in cluster '{cluster}'"
            )
            raise typer.Exit(1)

    delete_secret(name=name, namespace=namespace, context=cluster)
+
+
@secrets.command()
def sync(
    manifest: str = typer.Option(..., help="Path to the manifest file"),
    # TODO: should cluster be here or be in manifest as well?
    cluster: str = typer.Option(..., "--cluster", help="Cluster to sync secrets to"),
    interactive: bool = typer.Option(
        True, "--interactive/--no-interactive", help="Enable interactive prompts"
    ),
    namespace: str | None = typer.Option(
        None,
        help="Kubernetes namespace to deploy to (required in non-interactive mode)",
    ),
    # Annotation fixed to str | None: the option defaults to None, matching the
    # other nullable options in this file.
    values: str | None = typer.Option(None, "--values", help="Path to the values file"),
):
    """Sync secrets from the manifest into the target cluster"""
    # Docstring direction corrected: per the --cluster help text, secrets are
    # synced *to* the cluster, not from it.
    console.print(
        Panel.fit("🚀 [bold blue]Sync Secrets[/bold blue]", border_style="blue")
    )

    manifest_path = Path(manifest)
    if not manifest_path.exists():
        console.print(f"[red]Error:[/red] Manifest file not found: {manifest}")
        raise typer.Exit(1)

    # In non-interactive mode, require namespace
    if not interactive and not namespace:
        console.print(
            "[red]Error:[/red] --namespace is required in non-interactive mode"
        )
        raise typer.Exit(1)

    # Get namespace if not provided (only in interactive mode)
    if not namespace:
        namespace = questionary.text(
            "Enter Kubernetes namespace:", default="default"
        ).ask()
        namespace = handle_questionary_cancellation(namespace, "namespace input")

        if not namespace:
            # Message fixed: this is the secrets sync flow, not a deployment.
            console.print("Sync cancelled")
            raise typer.Exit(0)

    if values:
        values_path = Path(values)
        if not values_path.exists():
            console.print(f"[red]Error:[/red] Values file not found: {values_path}")
            raise typer.Exit(1)

    # Validate cluster and namespace
    check_and_switch_cluster_context(cluster)
    if not validate_namespace(namespace, cluster):
        console.print(
            f"[red]Error:[/red] Namespace '{namespace}' does not exist in cluster '{cluster}'"
        )
        raise typer.Exit(1)

    agent_manifest = AgentManifest.from_yaml(file_path=manifest)

    # Always call sync_secrets - it will handle the case of no credentials
    sync_secrets(
        manifest_obj=agent_manifest,
        cluster=cluster,
        namespace=namespace,
        interactive=interactive,
        values_path=values if values else None,  # values is already str or None
    )

    console.print("[green]Successfully synced secrets[/green]")
diff --git a/src/agentex/lib/cli/commands/tasks.py b/src/agentex/lib/cli/commands/tasks.py
new file mode 100644
index 000000000..55fc127e8
--- /dev/null
+++ b/src/agentex/lib/cli/commands/tasks.py
@@ -0,0 +1,46 @@
+import typer
+from rich import print_json
+
+from agentex import Agentex
+from agentex.lib.utils.logging import make_logger
+
logger = make_logger(__name__)

# Typer sub-application holding all `agentex tasks ...` commands.
tasks = typer.Typer()
+
+
@tasks.command()
def get(
    task_id: str = typer.Argument(..., help="ID of the task to get"),
):
    """
    Get the task with the given ID.
    """
    logger.info(f"Getting task: {task_id}")
    task = Agentex().tasks.retrieve(task_id=task_id)
    print(f"Full Task {task_id}:")
    print_json(data=task.to_dict())
+
+
@tasks.command()
def list():
    """
    List all tasks.
    """
    # Avoid reusing the module-level `tasks` Typer app name for the result.
    retrieved = Agentex().tasks.list()
    print_json(data=[item.to_dict() for item in retrieved])
+
+
@tasks.command()
def delete(
    task_id: str = typer.Argument(..., help="ID of the task to delete"),
):
    """
    Delete the task with the given ID.
    """
    logger.info(f"Deleting task: {task_id}")
    Agentex().tasks.delete(task_id=task_id)
    logger.info(f"Task deleted: {task_id}")
diff --git a/src/agentex/lib/cli/commands/uv.py b/src/agentex/lib/cli/commands/uv.py
new file mode 100644
index 000000000..bb0b1c45c
--- /dev/null
+++ b/src/agentex/lib/cli/commands/uv.py
@@ -0,0 +1,197 @@
+import os
+import subprocess
+import sys
+
+import typer
+
+from agentex.lib.utils.logging import make_logger
+
+logger = make_logger(__name__)
+
+
def get_codeartifact_index_url() -> str | None:
    """Return an authenticated CodeArtifact pip index URL, or None on failure.

    Shells out to the AWS CLI for an authorization token. Returns None when
    the CLI is missing or the token request fails/comes back empty.
    """
    # CodeArtifact configuration
    domain = "scale"
    owner = "307185671274"
    region = "us-west-2"
    repo = "scale-pypi"

    try:
        # Fetch the authentication token
        result = subprocess.run(
            [
                "aws",
                "codeartifact",
                "get-authorization-token",
                "--domain",
                domain,
                "--domain-owner",
                owner,
                "--region",
                region,
                "--query",
                "authorizationToken",
                "--output",
                "text",
            ],
            capture_output=True,
            text=True,
            check=True,
        )
    except subprocess.CalledProcessError as e:
        logger.warning(f"Failed to fetch CodeArtifact token: {e}")
        return None
    except FileNotFoundError:
        logger.warning("AWS CLI not found. Install it to use CodeArtifact integration.")
        return None

    token = result.stdout.strip()
    if not token:
        logger.warning("Failed to obtain CodeArtifact token")
        return None

    logger.info("Successfully obtained CodeArtifact token")
    return f"https://aws:{token}@{domain}-{owner}.d.codeartifact.{region}.amazonaws.com/pypi/{repo}/simple/"
+
+
# Typer sub-application wrapping the external `uv` tool.
uv = typer.Typer(
    help="Wrapper for uv command with AgentEx-specific enhancements",
    context_settings={"help_option_names": ["-h", "--help"]},
)

# Shared Argument spec for trailing args forwarded to `uv sync`.
sync_args = typer.Argument(None, help="Additional arguments to pass to uv sync")
+
+
@uv.command()
def sync(
    ctx: typer.Context,
    index: str | None = typer.Option(
        None, "--index", "-i", help="UV index URL to use for sync"
    ),
    group: str | None = typer.Option(
        None,
        "--group",
        "-g",
        help="Include dependencies from the specified dependency group",
    ),
    args: list[str] = sync_args,
):
    """Sync dependencies with optional UV_INDEX support"""
    args = args or []

    # Intercept help flags so our wrapper's help is shown instead of uv's.
    if "-h" in args or "--help" in args:
        typer.echo(ctx.get_help())
        return

    if index:
        os.environ["UV_INDEX_URL"] = index
        logger.info(f"Using provided UV_INDEX_URL: {index}")
    else:
        # No explicit index: fall back to CodeArtifact when available.
        codeartifact_url = get_codeartifact_index_url()
        if codeartifact_url:
            os.environ["UV_INDEX_URL"] = codeartifact_url
            logger.info("Using CodeArtifact UV_INDEX_URL")
        else:
            logger.info("No index URL provided, using default PyPI")

    # Build the uv sync command
    cmd = ["uv", "sync"]
    if group:
        cmd += ["--group", group]
        logger.info(f"Using dependency group: {group}")
    cmd += args

    try:
        completed = subprocess.run(cmd, check=True)
    except subprocess.CalledProcessError as e:
        logger.error(f"uv sync failed with exit code {e.returncode}")
        sys.exit(e.returncode)
    except FileNotFoundError:
        logger.error("uv command not found. Please install uv first.")
        sys.exit(1)
    sys.exit(completed.returncode)
+
+
# Shared Argument spec for trailing args forwarded to `uv add`.
add_args = typer.Argument(None, help="Additional arguments to pass to uv add")


@uv.command()
def add(
    ctx: typer.Context,
    index: str | None = typer.Option(
        None, "--index", "-i", help="UV index URL to use for add"
    ),
    args: list[str] = add_args,
):
    """Add dependencies with optional UV_INDEX support"""

    args = args or []

    # Intercept help flags so our wrapper's help is shown instead of uv's.
    if "-h" in args or "--help" in args:
        typer.echo(ctx.get_help())
        return

    if index:
        os.environ["UV_INDEX_URL"] = index
        logger.info(f"Using provided UV_INDEX_URL: {index}")
    else:
        # No explicit index: fall back to CodeArtifact when available.
        codeartifact_url = get_codeartifact_index_url()
        if codeartifact_url:
            os.environ["UV_INDEX_URL"] = codeartifact_url
            logger.info("Using CodeArtifact UV_INDEX_URL")
        else:
            logger.info("No index URL provided, using default PyPI")

    # Build the uv add command (args is already a list by this point).
    cmd = ["uv", "add", *args]

    try:
        completed = subprocess.run(cmd, check=True)
    except subprocess.CalledProcessError as e:
        logger.error(f"uv add failed with exit code {e.returncode}")
        sys.exit(e.returncode)
    except FileNotFoundError:
        logger.error("uv command not found. Please install uv first.")
        sys.exit(1)
    sys.exit(completed.returncode)
+
+
# Shared Argument spec for trailing args forwarded to `uv`.
run_args = typer.Argument(None, help="Arguments to pass to uv")


@uv.command()
def run(
    ctx: typer.Context,
    args: list[str] = run_args,
):
    """Run any uv command with arguments"""
    # No arguments: show the wrapper's help rather than invoking bare `uv`.
    if not args:
        typer.echo(ctx.get_help())
        return

    # Build the uv command
    cmd = ["uv", *args]

    try:
        completed = subprocess.run(cmd, check=True)
    except subprocess.CalledProcessError as e:
        logger.error(f"uv command failed with exit code {e.returncode}")
        sys.exit(e.returncode)
    except FileNotFoundError:
        logger.error("uv command not found. Please install uv first.")
        sys.exit(1)
    sys.exit(completed.returncode)
diff --git a/src/agentex/lib/cli/handlers/__init__.py b/src/agentex/lib/cli/handlers/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/agentex/lib/cli/handlers/agent_handlers.py b/src/agentex/lib/cli/handlers/agent_handlers.py
new file mode 100644
index 000000000..2e95dba73
--- /dev/null
+++ b/src/agentex/lib/cli/handlers/agent_handlers.py
@@ -0,0 +1,135 @@
+from __future__ import annotations
+
+from pathlib import Path
+
+from python_on_whales import DockerException, docker
+from rich.console import Console
+
+from agentex.lib.cli.handlers.run_handlers import RunError
+from agentex.lib.cli.handlers.run_handlers import run_agent as _run_agent
+from agentex.lib.sdk.config.agent_manifest import AgentManifest
+from agentex.lib.utils.logging import make_logger
+
+logger = make_logger(__name__)
+console = Console()
+
+
+class DockerBuildError(Exception):
+    """An error occurred during docker build (or build-and-push)."""
+
+
+def build_agent(
+    manifest_path: str,
+    registry_url: str,
+    repository_name: str,
+    push: bool = False,
+    secret: str | None = None,
+    tag: str | None = None,
+    build_args: list[str] | None = None,
+) -> str:
+    """Build the agent locally and optionally push to registry
+
+    Args:
+        manifest_path: Path to the agent manifest file
+        registry_url: Registry URL for pushing the image
+        repository_name: Image repository name; falls back to the agent name
+            from the manifest when empty
+        push: Whether to push the image to the registry
+        secret: Docker build secret in format 'id=secret-id,src=path-to-secret-file'
+        tag: Image tag to use (defaults to 'latest')
+        build_args: List of Docker build arguments in format 'KEY=VALUE'
+
+    Returns:
+        The image URL
+
+    Raises:
+        DockerBuildError: If the docker build (or build-and-push) fails
+    """
+    agent_manifest = AgentManifest.from_yaml(file_path=manifest_path)
+    # The build context root is declared relative to the manifest file location.
+    build_context_root = (
+        Path(manifest_path).parent / agent_manifest.build.context.root
+    ).resolve()
+
+    repository_name = repository_name or agent_manifest.agent.name
+
+    # Prepare image name
+    if registry_url:
+        image_name = f"{registry_url}/{repository_name}"
+    else:
+        image_name = repository_name
+
+    if tag:
+        image_name = f"{image_name}:{tag}"
+    else:
+        image_name = f"{image_name}:latest"
+
+    with agent_manifest.context_manager(build_context_root) as build_context:
+        logger.info(f"Building image {image_name} locally...")
+
+        # Log build context information for debugging
+        logger.info(f"Build context path: {build_context.path}")
+        logger.info(
+            f"Dockerfile path: {build_context.path / build_context.dockerfile_path}"
+        )
+
+        try:
+            # Prepare build arguments
+            # NOTE: images are always built for linux/amd64 here.
+            docker_build_kwargs = {
+                "context_path": str(build_context.path),
+                "file": str(build_context.path / build_context.dockerfile_path),
+                "tags": [image_name],
+                "platforms": ["linux/amd64"],
+            }
+
+            # Add Docker build args if provided
+            if build_args:
+                docker_build_args = {}
+                for arg in build_args:
+                    if "=" in arg:
+                        # Split only on the first "=" so values may contain "=".
+                        key, value = arg.split("=", 1)
+                        docker_build_args[key] = value
+                    else:
+                        logger.warning(
+                            f"Invalid build arg format: {arg}. Expected KEY=VALUE"
+                        )
+
+                if docker_build_args:
+                    docker_build_kwargs["build_args"] = docker_build_args
+                    logger.info(f"Using build args: {list(docker_build_args.keys())}")
+
+            # Add secret if provided
+            if secret:
+                docker_build_kwargs["secrets"] = [secret]
+
+            if push:
+                # Build and push in one step for multi-platform builds
+                logger.info("Building and pushing image...")
+                docker_build_kwargs["push"] = (
+                    True  # Push directly after build for multi-platform
+                )
+                docker.buildx.build(**docker_build_kwargs)
+
+                logger.info(f"Successfully built and pushed {image_name}")
+            else:
+                # Build only
+                logger.info("Building image...")
+                docker.buildx.build(**docker_build_kwargs)
+
+                logger.info(f"Successfully built {image_name}")
+
+        except DockerException as error:
+            # python-on-whales surfaces the CLI stderr; prefer it when present.
+            error_msg = error.stderr if error.stderr else str(error)
+            action = "build or push" if push else "build"
+            logger.error(f"{action.capitalize()} failed: {error_msg}", exc_info=True)
+            raise DockerBuildError(
+                f"Docker {action} failed: {error_msg}\n"
+                f"Build context: {build_context.path}\n"
+                f"Dockerfile path: {build_context.dockerfile_path}"
+            ) from error
+
+    return image_name
+
+
+def run_agent(manifest_path: str):
+ """Run an agent locally from the given manifest"""
+ import asyncio
+
+ try:
+ asyncio.run(_run_agent(manifest_path))
+ except RunError as e:
+ raise RuntimeError(str(e)) from e
diff --git a/src/agentex/lib/cli/handlers/deploy_handlers.py b/src/agentex/lib/cli/handlers/deploy_handlers.py
new file mode 100644
index 000000000..feaf6b0d9
--- /dev/null
+++ b/src/agentex/lib/cli/handlers/deploy_handlers.py
@@ -0,0 +1,351 @@
+import os
+import subprocess
+import tempfile
+from pathlib import Path
+from typing import Any
+
+import yaml
+from pydantic import BaseModel, Field
+from rich.console import Console
+
+from agentex.lib.cli.utils.exceptions import DeploymentError, HelmError
+from agentex.lib.cli.utils.kubectl_utils import check_and_switch_cluster_context
+from agentex.lib.sdk.config.agent_config import AgentConfig
+from agentex.lib.sdk.config.agent_manifest import AgentManifest
+from agentex.lib.sdk.config.deployment_config import ClusterConfig
+from agentex.lib.utils.logging import make_logger
+
+logger = make_logger(__name__)
+console = Console()
+
+TEMPORAL_WORKER_KEY = "temporal-worker"
+AGENTEX_AGENTS_HELM_CHART_VERSION = "0.1.2-v2-beta"
+
+
+class InputDeployOverrides(BaseModel):
+    """CLI-supplied deploy-time overrides that take precedence over the
+    manifest's deployment image settings."""
+
+    repository: str | None = Field(
+        default=None, description="Override the repository for deployment"
+    )
+    image_tag: str | None = Field(
+        default=None, description="Override the image tag for deployment"
+    )
+
+
+def check_helm_installed() -> bool:
+ """Check if helm is installed and available"""
+ try:
+ result = subprocess.run(
+ ["helm", "version", "--short"], capture_output=True, text=True, check=True
+ )
+ logger.info(f"Helm version: {result.stdout.strip()}")
+ return True
+ except (subprocess.CalledProcessError, FileNotFoundError):
+ return False
+
+
+def add_helm_repo() -> None:
+ """Add the agentex helm repository if not already added"""
+ try:
+ # Check if repo already exists
+ result = subprocess.run(
+ ["helm", "repo", "list"], capture_output=True, text=True, check=True
+ )
+
+ if "scale-egp" not in result.stdout:
+ console.print("Adding agentex helm repository...")
+ subprocess.run(
+ [
+ "helm",
+ "repo",
+ "add",
+ "scale-egp",
+ "https://scale-egp-helm-charts-us-west-2.s3.amazonaws.com/charts",
+ ],
+ check=True,
+ )
+ else:
+ logger.info("Helm repository already exists. Running update...")
+
+ subprocess.run(["helm", "repo", "update"], check=True)
+ console.print("[green]✓[/green] Helm repository update successfully")
+
+ except subprocess.CalledProcessError as e:
+ raise HelmError(f"Failed to add helm repository: {e}") from e
+
+
+def load_override_config(override_file_path: str | None = None) -> ClusterConfig | None:
+ """Load override configuration from specified file path"""
+ if not override_file_path:
+ return None
+
+ override_path = Path(override_file_path)
+ if not override_path.exists():
+ raise DeploymentError(f"Override file not found: {override_file_path}")
+
+ try:
+ with open(override_path) as f:
+ config_data = yaml.safe_load(f)
+ return ClusterConfig(**config_data) if config_data else None
+ except Exception as e:
+ raise DeploymentError(
+ f"Failed to load override config from {override_file_path}: {e}"
+ ) from e
+
+
+def merge_deployment_configs(
+ manifest: AgentManifest,
+ cluster_config: ClusterConfig | None,
+ deploy_overrides: InputDeployOverrides,
+) -> dict[str, Any]:
+ agent_config: AgentConfig = manifest.agent
+
+ """Merge global deployment config with cluster-specific overrides into helm values"""
+ if not manifest.deployment:
+ raise DeploymentError("No deployment configuration found in manifest")
+
+ repository = deploy_overrides.repository or manifest.deployment.image.repository
+ image_tag = deploy_overrides.image_tag or manifest.deployment.image.tag
+
+ if not repository or not image_tag:
+ raise DeploymentError("Repository and image tag are required")
+
+ # Start with global configuration
+ helm_values = {
+ "global": {
+ "image": {
+ "repository": repository,
+ "tag": image_tag,
+ "pullPolicy": "IfNotPresent",
+ },
+ "agent": {
+ "name": manifest.agent.name,
+ "description": manifest.agent.description,
+ "acp_type": manifest.agent.acp_type,
+ },
+ },
+ "replicaCount": manifest.deployment.global_config.replicaCount,
+ "resources": {
+ "requests": {
+ "cpu": manifest.deployment.global_config.resources.requests.cpu,
+ "memory": manifest.deployment.global_config.resources.requests.memory,
+ },
+ "limits": {
+ "cpu": manifest.deployment.global_config.resources.limits.cpu,
+ "memory": manifest.deployment.global_config.resources.limits.memory,
+ },
+ },
+ }
+
+ # Handle temporal configuration using new helper methods
+ if agent_config.is_temporal_agent():
+ temporal_config = agent_config.get_temporal_workflow_config()
+ if temporal_config:
+ helm_values[TEMPORAL_WORKER_KEY] = {}
+ helm_values["global"]["workflow"] = {
+ "name": temporal_config.name,
+ "taskQueue": temporal_config.queue_name,
+ }
+ helm_values[TEMPORAL_WORKER_KEY]["enabled"] = True
+
+ secret_env_vars = []
+ if agent_config.credentials:
+ for credential in agent_config.credentials:
+ secret_env_vars.append(
+ {
+ "name": credential.env_var_name,
+ "secretName": credential.secret_name,
+ "secretKey": credential.secret_key,
+ }
+ )
+
+ helm_values["secretEnvVars"] = secret_env_vars
+ if TEMPORAL_WORKER_KEY in helm_values:
+ helm_values[TEMPORAL_WORKER_KEY]["secretEnvVars"] = secret_env_vars
+
+ # Set the agent_config env vars first to the helm values and so then it can be overriden by the cluster config
+ if agent_config.env:
+ helm_values["env"] = agent_config.env
+ if TEMPORAL_WORKER_KEY in helm_values:
+ helm_values[TEMPORAL_WORKER_KEY]["env"] = agent_config.env
+
+ if manifest.deployment and manifest.deployment.imagePullSecrets:
+ pull_secrets = [
+ pull_secret.to_dict()
+ for pull_secret in manifest.deployment.imagePullSecrets
+ ]
+ helm_values["global"]["imagePullSecrets"] = pull_secrets
+ # TODO: Remove this once i bump the chart version again
+ helm_values["imagePullSecrets"] = pull_secrets
+
+ # Apply cluster-specific overrides
+ if cluster_config:
+ if cluster_config.image:
+ if cluster_config.image.repository:
+ helm_values["global"]["image"]["repository"] = (
+ cluster_config.image.repository
+ )
+ if cluster_config.image.tag:
+ helm_values["global"]["image"]["tag"] = cluster_config.image.tag
+
+ if cluster_config.replicaCount is not None:
+ helm_values["replicaCount"] = cluster_config.replicaCount
+
+ if cluster_config.resources:
+ if cluster_config.resources.requests:
+ helm_values["resources"]["requests"].update(
+ {
+ "cpu": cluster_config.resources.requests.cpu,
+ "memory": cluster_config.resources.requests.memory,
+ }
+ )
+ if cluster_config.resources.limits:
+ helm_values["resources"]["limits"].update(
+ {
+ "cpu": cluster_config.resources.limits.cpu,
+ "memory": cluster_config.resources.limits.memory,
+ }
+ )
+
+ if cluster_config.env:
+ helm_values["env"] = cluster_config.env
+
+ # Apply additional arbitrary overrides
+ if cluster_config.additional_overrides:
+ _deep_merge(helm_values, cluster_config.additional_overrides)
+
+ return helm_values
+
+
+def _deep_merge(base_dict: dict[str, Any], override_dict: dict[str, Any]) -> None:
+ """Deep merge override_dict into base_dict"""
+ for key, value in override_dict.items():
+ if (
+ key in base_dict
+ and isinstance(base_dict[key], dict)
+ and isinstance(value, dict)
+ ):
+ _deep_merge(base_dict[key], value)
+ else:
+ base_dict[key] = value
+
+
+def create_helm_values_file(helm_values: dict[str, Any]) -> str:
+ """Create a temporary helm values file"""
+ with tempfile.NamedTemporaryFile(mode="w", suffix=".yaml", delete=False) as f:
+ yaml.dump(helm_values, f, default_flow_style=False)
+ return f.name
+
+
+def deploy_agent(
+    manifest_path: str,
+    cluster_name: str,
+    namespace: str,
+    deploy_overrides: InputDeployOverrides,
+    override_file_path: str | None = None,
+) -> None:
+    """Deploy an agent using helm
+
+    Args:
+        manifest_path: Path to the agent manifest file
+        cluster_name: kubectl context/cluster to deploy into
+        namespace: Kubernetes namespace for the helm release
+        deploy_overrides: CLI-level repository/tag overrides
+        override_file_path: Optional cluster-specific values override file
+
+    Raises:
+        DeploymentError: if prerequisites or configuration are invalid
+        HelmError: if the helm install/upgrade itself fails
+    """
+
+    # Validate prerequisites
+    if not check_helm_installed():
+        raise DeploymentError("Helm is not installed. Please install helm first.")
+
+    # Switch to the specified cluster context
+    check_and_switch_cluster_context(cluster_name)
+
+    manifest = AgentManifest.from_yaml(file_path=manifest_path)
+    override_config = load_override_config(override_file_path)
+
+    # Provide feedback about override configuration
+    if override_config:
+        console.print(f"[green]✓[/green] Using override config: {override_file_path}")
+    else:
+        console.print(
+            "[yellow]ℹ[/yellow] No override config specified, using global defaults"
+        )
+
+    # Add helm repository/update
+    add_helm_repo()
+
+    # Merge configurations
+    helm_values = merge_deployment_configs(manifest, override_config, deploy_overrides)
+
+    # Create values file (temporary; removed in the finally block below)
+    values_file = create_helm_values_file(helm_values)
+
+    try:
+        agent_name = manifest.agent.name
+        # The release is named after the agent so re-deploys find it again.
+        release_name = agent_name
+
+        console.print(
+            f"Deploying agent [bold]{agent_name}[/bold] to cluster [bold]{cluster_name}[/bold] in namespace [bold]{namespace}[/bold]"
+        )
+
+        # Check if release exists
+        try:
+            subprocess.run(
+                ["helm", "status", release_name, "-n", namespace],
+                capture_output=True,
+                check=True,
+            )
+
+            # Release exists, do upgrade
+            console.print("Existing deployment found, upgrading...")
+            command = [
+                "helm",
+                "upgrade",
+                release_name,
+                "scale-egp/agentex-agent",
+                "--version",
+                AGENTEX_AGENTS_HELM_CHART_VERSION,
+                "-f",
+                values_file,
+                "-n",
+                namespace,
+                "--atomic",
+                "--timeout",
+                "10m",
+            ]
+            console.print(f"[blue]ℹ[/blue] Running command: {' '.join(command)}")
+            subprocess.run(command, check=True)
+            console.print("[green]✓[/green] Agent upgraded successfully")
+
+        except subprocess.CalledProcessError:
+            # Release doesn't exist, do install
+            console.print("Installing new deployment...")
+            command = [
+                "helm",
+                "install",
+                release_name,
+                "scale-egp/agentex-agent",
+                "--version",
+                AGENTEX_AGENTS_HELM_CHART_VERSION,
+                "-f",
+                values_file,
+                "-n",
+                namespace,
+                "--create-namespace",
+                "--atomic",
+                "--timeout",
+                "10m",
+            ]
+            console.print(f"[blue]ℹ[/blue] Running command: {' '.join(command)}")
+            subprocess.run(command, check=True)
+            console.print("[green]✓[/green] Agent deployed successfully")
+
+        # Show success message with helpful commands
+        console.print("\n[green]🎉 Deployment completed successfully![/green]")
+        console.print(
+            f"[blue]Check deployment status:[/blue] helm status {release_name} -n {namespace}"
+        )
+        console.print(
+            f"[blue]View logs:[/blue] kubectl logs -l app.kubernetes.io/name=agentex-agent -n {namespace}"
+        )
+
+    except subprocess.CalledProcessError as e:
+        raise HelmError(
+            f"Helm deployment failed: {e}\n"
+            f"Note: Due to --atomic flag, any partial deployment has been automatically rolled back."
+        ) from e
+    finally:
+        # Clean up values file
+        os.unlink(values_file)
diff --git a/src/agentex/lib/cli/handlers/run_handlers.py b/src/agentex/lib/cli/handlers/run_handlers.py
new file mode 100644
index 000000000..d113d8037
--- /dev/null
+++ b/src/agentex/lib/cli/handlers/run_handlers.py
@@ -0,0 +1,355 @@
+import asyncio
+import os
+import signal
+import sys
+from pathlib import Path
+
+from rich.console import Console
+from rich.panel import Panel
+
+from agentex.lib.sdk.config.agent_config import AgentConfig
+from agentex.lib.sdk.config.agent_manifest import AgentManifest
+from agentex.lib.utils.logging import make_logger
+
+logger = make_logger(__name__)
+console = Console()
+
+
+class RunError(Exception):
+    """An error occurred during agent run (bad manifest, missing files, startup failure)."""
+
+
+class ProcessManager:
+    """Manages multiple subprocesses with proper cleanup"""
+
+    def __init__(self):
+        # Child processes to supervise, in spawn order.
+        self.processes: list[asyncio.subprocess.Process] = []
+        # Set once shutdown has been requested (e.g. from a signal handler).
+        self.shutdown_event = asyncio.Event()
+
+    def add_process(self, process: asyncio.subprocess.Process):
+        """Add a process to be managed"""
+        self.processes.append(process)
+
+    async def wait_for_shutdown(self):
+        """Wait for shutdown signal"""
+        await self.shutdown_event.wait()
+
+    def shutdown(self):
+        """Signal shutdown and terminate all processes"""
+        self.shutdown_event.set()
+
+    async def cleanup_processes(self):
+        """Clean up all processes: SIGTERM first, SIGKILL after a 5s grace period."""
+        if not self.processes:
+            return
+
+        console.print("\n[yellow]Shutting down processes...[/yellow]")
+
+        # Send SIGTERM to all processes
+        for process in self.processes:
+            if process.returncode is None:  # Process is still running
+                try:
+                    process.terminate()
+                except ProcessLookupError:
+                    pass  # Process already terminated
+
+        # Wait for graceful shutdown
+        try:
+            await asyncio.wait_for(
+                asyncio.gather(*[p.wait() for p in self.processes], return_exceptions=True),
+                timeout=5.0,
+            )
+        except TimeoutError:
+            # NOTE: on Python >= 3.11, asyncio.TimeoutError IS the builtin
+            # TimeoutError, so this catches wait_for timeouts; on older
+            # interpreters it would not - this project pins Python 3.12.
+            # Force kill if not terminated gracefully
+            for process in self.processes:
+                if process.returncode is None:
+                    try:
+                        process.kill()
+                        await process.wait()
+                    except ProcessLookupError:
+                        pass
+
+        console.print("[green]All processes stopped[/green]")
+
+
+def resolve_and_validate_path(base_path: Path, configured_path: str, file_type: str) -> Path:
+ """Resolve and validate a configured path"""
+ path_obj = Path(configured_path)
+
+ if path_obj.is_absolute():
+ # Absolute path - use as-is
+ resolved_path = path_obj
+ else:
+ # Relative path - resolve relative to manifest directory
+ resolved_path = (base_path / configured_path).resolve()
+
+ # Validate the file exists
+ if not resolved_path.exists():
+ raise RunError(
+ f"{file_type} file not found: {resolved_path}\n"
+ f" Configured path: {configured_path}\n"
+ f" Resolved from manifest: {base_path}"
+ )
+
+ # Validate it's actually a file
+ if not resolved_path.is_file():
+ raise RunError(f"{file_type} path is not a file: {resolved_path}")
+
+ return resolved_path
+
+
+def validate_path_security(resolved_path: Path, manifest_dir: Path) -> None:
+ """Basic security validation for resolved paths"""
+ try:
+ # Ensure the resolved path is accessible
+ resolved_path.resolve()
+
+ # Optional: Add warnings for paths that go too far up
+ try:
+ # Check if path goes more than 3 levels up from manifest
+ relative_to_manifest = resolved_path.relative_to(manifest_dir.parent.parent.parent)
+ if str(relative_to_manifest).startswith(".."):
+ logger.warning(
+ f"Path goes significantly outside project structure: {resolved_path}"
+ )
+ except ValueError:
+ # Path is outside the tree - that's okay, just log it
+ logger.info(f"Using path outside manifest directory tree: {resolved_path}")
+
+ except Exception as e:
+ raise RunError(f"Path resolution failed: {resolved_path} - {str(e)}") from e
+
+
+def get_file_paths(manifest: AgentManifest, manifest_path: str) -> dict[str, Path]:
+    """Get resolved file paths from manifest configuration
+
+    Returns a dict with keys "acp", "worker", "acp_dir" and "worker_dir";
+    "worker"/"worker_dir" are None when no worker file applies.
+
+    Raises:
+        RunError: if a configured or default file does not exist.
+    """
+    manifest_dir = Path(manifest_path).parent.resolve()
+
+    # Use configured paths or fall back to defaults for backward compatibility
+    if manifest.local_development and manifest.local_development.paths:
+        paths_config = manifest.local_development.paths
+
+        # Resolve ACP path
+        acp_path = resolve_and_validate_path(manifest_dir, paths_config.acp, "ACP server")
+        validate_path_security(acp_path, manifest_dir)
+
+        # Resolve worker path if specified
+        worker_path = None
+        if paths_config.worker:
+            worker_path = resolve_and_validate_path(
+                manifest_dir, paths_config.worker, "Temporal worker"
+            )
+            validate_path_security(worker_path, manifest_dir)
+    else:
+        # Backward compatibility: use old hardcoded structure
+        # (project/acp.py, plus project/run_worker.py for temporal agents)
+        project_dir = manifest_dir / "project"
+        acp_path = project_dir / "acp.py"
+        worker_path = project_dir / "run_worker.py" if is_temporal_agent(manifest) else None
+
+        # Validate backward compatibility paths
+        if not acp_path.exists():
+            raise RunError(f"ACP file not found: {acp_path}")
+
+        if worker_path and not worker_path.exists():
+            raise RunError(f"Worker file not found: {worker_path}")
+
+    return {
+        "acp": acp_path,
+        "worker": worker_path,
+        "acp_dir": acp_path.parent,
+        "worker_dir": worker_path.parent if worker_path else None,
+    }
+
+
+def create_agent_environment(manifest: AgentManifest) -> dict[str, str]:
+ """Create environment variables for agent processes without modifying os.environ"""
+ # Start with current environment
+ env = dict(os.environ)
+
+ agent_config: AgentConfig = manifest.agent
+
+ # TODO: Combine this logic with the deploy_handlers so that we can reuse the env vars
+ env_vars = {
+ "ENVIRONMENT": "development",
+ "TEMPORAL_ADDRESS": "localhost:7233",
+ "REDIS_URL": "redis://localhost:6379",
+ "AGENT_NAME": manifest.agent.name,
+ "ACP_TYPE": manifest.agent.acp_type,
+ "ACP_URL": f"http://{manifest.local_development.agent.host_address}",
+ "ACP_PORT": str(manifest.local_development.agent.port),
+ }
+
+ # Add description if available
+ if manifest.agent.description:
+ env_vars["AGENT_DESCRIPTION"] = manifest.agent.description
+
+ # Add temporal-specific variables if this is a temporal agent
+ if manifest.agent.is_temporal_agent():
+ temporal_config = manifest.agent.get_temporal_workflow_config()
+ if temporal_config:
+ env_vars["WORKFLOW_NAME"] = temporal_config.name
+ env_vars["WORKFLOW_TASK_QUEUE"] = temporal_config.queue_name
+
+ if agent_config.env:
+ for key, value in agent_config.env.items():
+ env_vars[key] = value
+
+ env.update(env_vars)
+
+ return env
+
+
+async def start_acp_server(
+ acp_path: Path, port: int, env: dict[str, str]
+) -> asyncio.subprocess.Process:
+ """Start the ACP server process"""
+ cmd = [
+ sys.executable,
+ "-m",
+ "uvicorn",
+ f"{acp_path.parent.name}.acp:acp",
+ "--reload",
+ "--port",
+ str(port),
+ "--host",
+ "0.0.0.0",
+ ]
+
+ console.print(f"[blue]Starting ACP server from {acp_path} on port {port}...[/blue]")
+ return await asyncio.create_subprocess_exec(
+ *cmd,
+ cwd=acp_path.parent.parent,
+ env=env,
+ stdout=asyncio.subprocess.PIPE,
+ stderr=asyncio.subprocess.STDOUT,
+ )
+
+
+async def start_temporal_worker(
+ worker_path: Path, env: dict[str, str]
+) -> asyncio.subprocess.Process:
+ """Start the temporal worker process"""
+ cmd = [sys.executable, "-m", "run_worker"]
+
+ console.print(f"[blue]Starting Temporal worker from {worker_path}...[/blue]")
+
+ return await asyncio.create_subprocess_exec(
+ *cmd,
+ cwd=worker_path.parent,
+ env=env,
+ stdout=asyncio.subprocess.PIPE,
+ stderr=asyncio.subprocess.STDOUT,
+ )
+
+
+def is_temporal_agent(manifest: AgentManifest) -> bool:
+    """Check if this is a temporal agent (delegates to the agent config)."""
+    return manifest.agent.is_temporal_agent()
+
+
+async def stream_process_output(process: asyncio.subprocess.Process, prefix: str):
+    """Stream process output with prefix
+
+    Reads the child's stdout line by line until EOF and echoes each
+    non-empty line to the console, prefixed for attribution.
+    """
+    try:
+        while True:
+            line = await process.stdout.readline()
+            if not line:
+                break
+            decoded_line = line.decode("utf-8").rstrip()
+            if decoded_line:  # Only print non-empty lines
+                console.print(f"[dim]{prefix}:[/dim] {decoded_line}")
+    except Exception as e:
+        # Broad catch is deliberate: streaming is best-effort and must not
+        # crash shutdown if the pipe closes mid-read.
+        logger.debug(f"Output streaming ended for {prefix}: {e}")
+
+
+async def run_agent(manifest_path: str):
+    """Run an agent locally from the given manifest
+
+    Starts the ACP server (and, for temporal agents, the worker) as
+    subprocesses, streams their output, and blocks until SIGINT/SIGTERM.
+
+    Raises:
+        RunError: for any validation or startup failure.
+    """
+
+    # Validate manifest exists
+    manifest_file = Path(manifest_path)
+
+    if not manifest_file.exists():
+        raise RunError(f"Manifest file not found: {manifest_path}")
+
+    # Parse manifest
+    try:
+        manifest = AgentManifest.from_yaml(file_path=manifest_path)
+    except Exception as e:
+        raise RunError(f"Failed to parse manifest: {str(e)}") from e
+
+    # Get and validate file paths
+    try:
+        file_paths = get_file_paths(manifest, manifest_path)
+    except Exception as e:
+        raise RunError(str(e)) from e
+
+    # Check if temporal agent and validate worker file
+    if is_temporal_agent(manifest):
+        if not file_paths["worker"]:
+            raise RunError("Temporal agent requires a worker file path to be configured")
+
+    # Create environment for subprocesses
+    agent_env = create_agent_environment(manifest)
+
+    # Setup process manager
+    process_manager = ProcessManager()
+
+    # Setup signal handlers for graceful shutdown.
+    # NOTE(review): plain signal.signal handlers setting an asyncio.Event is
+    # not the canonical asyncio approach (loop.add_signal_handler) - confirm
+    # this wakes the event loop promptly on all supported platforms.
+    def signal_handler(signum, frame):
+        console.print(f"\n[yellow]Received signal {signum}, shutting down...[/yellow]")
+        process_manager.shutdown()
+
+    signal.signal(signal.SIGINT, signal_handler)
+    signal.signal(signal.SIGTERM, signal_handler)
+
+    try:
+        console.print(
+            Panel.fit(
+                f"🚀 [bold blue]Running Agent: {manifest.agent.name}[/bold blue]",
+                border_style="blue",
+            )
+        )
+
+        # Start ACP server
+        acp_process = await start_acp_server(
+            file_paths["acp"], manifest.local_development.agent.port, agent_env
+        )
+        process_manager.add_process(acp_process)
+
+        # Start output streaming for ACP
+        acp_output_task = asyncio.create_task(stream_process_output(acp_process, "ACP"))
+
+        tasks = [acp_output_task]
+
+        # Start temporal worker if needed
+        if is_temporal_agent(manifest):
+            worker_process = await start_temporal_worker(file_paths["worker"], agent_env)
+            process_manager.add_process(worker_process)
+
+            # Start output streaming for worker
+            worker_output_task = asyncio.create_task(
+                stream_process_output(worker_process, "WORKER")
+            )
+            tasks.append(worker_output_task)
+
+        console.print(
+            f"\n[green]✓ Agent running at: http://localhost:{manifest.local_development.agent.port}[/green]"
+        )
+        console.print("[dim]Press Ctrl+C to stop[/dim]\n")
+
+        # Wait for shutdown signal
+        await process_manager.wait_for_shutdown()
+
+        # Cancel output streaming tasks before killing their processes
+        for task in tasks:
+            task.cancel()
+            try:
+                await task
+            except asyncio.CancelledError:
+                pass
+
+    except Exception as e:
+        logger.exception("Error running agent")
+        raise RunError(f"Failed to run agent: {str(e)}") from e
+
+    finally:
+        # Ensure cleanup happens
+        await process_manager.cleanup_processes()
diff --git a/src/agentex/lib/cli/handlers/secret_handlers.py b/src/agentex/lib/cli/handlers/secret_handlers.py
new file mode 100644
index 000000000..ef68ce5d6
--- /dev/null
+++ b/src/agentex/lib/cli/handlers/secret_handlers.py
@@ -0,0 +1,670 @@
+import base64
+import json
+from collections import defaultdict
+from pathlib import Path
+from typing import Any
+
+import questionary
+import typer
+import yaml
+from kubernetes.client.rest import ApiException
+from rich.console import Console
+
+from agentex.lib.cli.utils.cli_utils import handle_questionary_cancellation
+from agentex.lib.cli.utils.kubectl_utils import get_k8s_client
+from agentex.lib.cli.utils.kubernetes_secrets_utils import (
+ KUBERNETES_SECRET_TO_MANIFEST_KEY,
+ KUBERNETES_SECRET_TYPE_DOCKERCONFIGJSON,
+ KUBERNETES_SECRET_TYPE_OPAQUE,
+ VALID_SECRET_TYPES,
+ create_image_pull_secret_with_data,
+ create_secret_with_data,
+ get_secret_data,
+ update_image_pull_secret_with_data,
+ update_secret_with_data,
+)
+from agentex.lib.sdk.config.agent_config import AgentConfig
+from agentex.lib.sdk.config.agent_manifest import AgentManifest
+from agentex.lib.sdk.config.deployment_config import (
+ DeploymentConfig,
+ ImagePullSecretConfig,
+ InjectedSecretsValues,
+)
+from agentex.lib.types.credentials import CredentialMapping
+from agentex.lib.utils.logging import make_logger
+
+logger = make_logger(__name__)
+console = Console()
+
+
+# TODO: parse this into a Pydantic model.
+def load_values_file(values_path: str) -> dict[str, dict[str, str]]:
+ """Load and parse the values file (YAML/JSON)"""
+ try:
+ path = Path(values_path)
+ content = path.read_text()
+
+ if path.suffix.lower() in [".yaml", ".yml"]:
+ data = yaml.safe_load(content)
+ elif path.suffix.lower() == ".json":
+ data = json.loads(content)
+ else:
+ # Try YAML first, then JSON
+ try:
+ data = yaml.safe_load(content)
+ except yaml.YAMLError:
+ data = json.loads(content)
+ return InjectedSecretsValues.model_validate(data).model_dump()
+
+ except Exception as e:
+ raise RuntimeError(
+ f"Failed to load values file '{values_path}': {str(e)}"
+ ) from e
+
+
+def interactive_secret_input(secret_name: str, secret_key: str) -> str:
+    """Prompt user for secret value with appropriate input method
+
+    Offers plain, hidden, multi-line, JSON/YAML, or file-based entry and
+    returns the captured value. Cancelling a questionary prompt exits the
+    CLI via handle_questionary_cancellation.
+    """
+    console.print(
+        f"\n[bold]Enter value for secret '[cyan]{secret_name}[/cyan]' key '[cyan]{secret_key}[/cyan]':[/bold]"
+    )
+
+    input_type = questionary.select(
+        "What type of value is this?",
+        choices=[
+            "Simple text",
+            "Sensitive/password (hidden input)",
+            "Multi-line text",
+            "JSON/YAML content",
+            "Read from file",
+        ],
+    ).ask()
+
+    input_type = handle_questionary_cancellation(input_type, "secret input")
+
+    if input_type == "Sensitive/password (hidden input)":
+        result = questionary.password("Enter value (input will be hidden):").ask()
+        return handle_questionary_cancellation(result, "password input")
+
+    elif input_type == "Multi-line text":
+        console.print(
+            "[yellow]Enter multi-line text (press Ctrl+D when finished):[/yellow]"
+        )
+        # Collect raw stdin lines until EOF (Ctrl+D).
+        lines = []
+        try:
+            while True:
+                line = input()
+                lines.append(line)
+        except EOFError:
+            pass
+        except KeyboardInterrupt:
+            console.print("[yellow]Multi-line input cancelled by user[/yellow]")
+            raise typer.Exit(0)  # noqa
+        return "\n".join(lines)
+
+    elif input_type == "JSON/YAML content":
+        value = questionary.text("Enter JSON/YAML content:").ask()
+        value = handle_questionary_cancellation(value, "JSON/YAML input")
+        # Validate JSON/YAML format (warn only - the raw text is stored as-is)
+        try:
+            json.loads(value)
+        except json.JSONDecodeError:
+            try:
+                yaml.safe_load(value)
+            except yaml.YAMLError:
+                console.print(
+                    "[yellow]Warning: Content doesn't appear to be valid JSON or YAML[/yellow]"
+                )
+        return value
+
+    elif input_type == "Read from file":
+        file_path = questionary.path("Enter file path:").ask()
+        file_path = handle_questionary_cancellation(file_path, "file path input")
+        try:
+            return Path(file_path).read_text().strip()
+        except Exception as e:
+            # Unreadable file: fall back to manual entry rather than aborting.
+            console.print(f"[red]Error reading file: {e}[/red]")
+            manual_value = questionary.text("Enter value manually:").ask()
+            return handle_questionary_cancellation(manual_value, "manual value input")
+
+    else:  # Simple text
+        result = questionary.text("Enter value:").ask()
+        return handle_questionary_cancellation(result, "text input")
+
+
+def get_secret(name: str, namespace: str, context: str | None = None) -> dict[str, Any]:
+    """Get details about a secret
+
+    Returns a metadata dict with an "exists" flag; never the secret values.
+    A missing secret (404) is reported on the console, not raised.
+    """
+    v1 = get_k8s_client(context)
+
+    try:
+        secret = v1.read_namespaced_secret(name=name, namespace=namespace)
+        return {
+            "name": secret.metadata.name,
+            "namespace": namespace,
+            "created": secret.metadata.creation_timestamp.isoformat(),
+            "exists": True,
+        }
+    except ApiException as e:
+        if e.status == 404:
+            console.print(
+                f"[red]Error: Secret '{name}' not found in namespace '{namespace}'[/red]"
+            )
+            return {"name": name, "namespace": namespace, "exists": False}
+        # Any other API failure is unexpected - escalate.
+        raise RuntimeError(f"Failed to get secret: {str(e)}") from e
+
+
+def delete_secret(name: str, namespace: str, context: str | None = None) -> None:
+    """Delete a secret
+
+    A missing secret (404) is reported on the console but not raised;
+    any other API failure raises RuntimeError.
+    """
+    v1 = get_k8s_client(context)
+
+    try:
+        v1.delete_namespaced_secret(name=name, namespace=namespace)
+        console.print(
+            f"[green]Deleted secret '{name}' from namespace '{namespace}'[/green]"
+        )
+    except ApiException as e:
+        if e.status == 404:
+            console.print(
+                f"[red]Error: Secret '{name}' not found in namespace '{namespace}'[/red]"
+            )
+        else:
+            console.print(f"[red]Error deleting secret: {e.reason}[/red]")
+            raise RuntimeError(f"Failed to delete secret: {str(e)}") from e
+
+
+def get_kubernetes_secrets_by_type(
+    namespace: str, context: str | None = None
+) -> dict[str, list[dict]]:
+    """Group secrets in *namespace* by their Kubernetes secret type.
+
+    Only secrets whose type is in VALID_SECRET_TYPES are included; each
+    entry carries the secret's name, namespace and creation timestamp
+    (metadata only, never the secret values).
+    """
+    v1 = get_k8s_client(context)
+
+    try:
+        secrets = v1.list_namespaced_secret(namespace=namespace)
+        secret_type_to_secret = defaultdict(list)
+        for secret in secrets.items:
+            if secret.type in VALID_SECRET_TYPES:
+                secret_type_to_secret[secret.type].append(
+                    {
+                        "name": secret.metadata.name,
+                        "namespace": namespace,
+                        "created": secret.metadata.creation_timestamp.isoformat(),
+                    }
+                )
+
+        return secret_type_to_secret
+    except ApiException as e:
+        console.print(
+            f"[red]Error listing secrets in namespace '{namespace}': {e.reason}[/red]"
+        )
+        raise RuntimeError(f"Failed to list secrets: {str(e)}") from e
+
+
def sync_user_defined_secrets(
    manifest_obj: AgentManifest,
    found_secrets: list[dict],
    values_data: dict[str, Any],
    cluster: str,
    namespace: str,
    interactive: bool,
    changes: dict[str, list[str]],
) -> None:
    """Sync user defined secrets between manifest, cluster, and values file.

    Args:
        manifest_obj: Parsed agent manifest; its `agent.credentials` entries
            define which secrets and keys are required.
        found_secrets: Metadata dicts for Opaque secrets already in the cluster.
        values_data: Values loaded from a values file, keyed by secret name
            then secret key.
        cluster: Kubeconfig context of the target cluster.
        namespace: Kubernetes namespace to sync into.
        interactive: When True, prompt for values not present in values_data.
        changes: Mutated in place; human-readable change descriptions are
            appended under the "create", "update", and "noop" keys.

    Raises:
        RuntimeError: When a required value is missing and interactive mode
            is disabled.
    """
    console.print(
        f"[bold]Syncing user defined secrets to cluster: {cluster} namespace: {namespace}[/bold]"
    )

    # Get the secrets from the cluster using the specified namespace and cluster context
    cluster_secret_names = {secret["name"] for secret in found_secrets}
    # Get the secrets from the manifest
    agent_config: AgentConfig = manifest_obj.agent
    manifest_credentials: list[CredentialMapping] = agent_config.credentials or []

    if not manifest_credentials:
        console.print("[yellow]No credentials found in manifest[/yellow]")
        return

    # Build required secrets map from manifest
    required_secrets = {}  # {secret_name: {secret_key: env_var_name}}
    for cred in manifest_credentials:
        if cred.secret_name not in required_secrets:
            required_secrets[cred.secret_name] = {}
        required_secrets[cred.secret_name][cred.secret_key] = cred.env_var_name

    # Process each required secret
    for secret_name, required_keys in required_secrets.items():
        current_secret_data = get_secret_data(secret_name, namespace, cluster)
        new_secret_data = {}
        secret_needs_update = False

        # Process each required key in this secret
        for secret_key, _ in required_keys.items():
            current_value = current_secret_data.get(secret_key)

            # Resolve the new value: the values file wins, then interactive
            # prompting; otherwise we cannot proceed.
            if (
                values_data
                and secret_name in values_data
                and secret_key in values_data[secret_name]
            ):
                new_value = values_data[secret_name][secret_key]
            elif interactive:
                if current_value:
                    console.print(
                        f"[blue]Secret '{secret_name}' key '{secret_key}' already exists[/blue]"
                    )
                    update_choice = questionary.select(
                        "What would you like to do?",
                        choices=[
                            "Keep current value",
                            "Update with new value",
                            "Show current value",
                        ],
                    ).ask()
                    update_choice = handle_questionary_cancellation(
                        update_choice, "secret update choice"
                    )

                    if update_choice == "Show current value":
                        console.print(f"Current value: [dim]{current_value}[/dim]")
                        # Re-prompt without the "show" option now that the
                        # value has been revealed.
                        update_choice = questionary.select(
                            "What would you like to do?",
                            choices=["Keep current value", "Update with new value"],
                        ).ask()
                        update_choice = handle_questionary_cancellation(
                            update_choice, "secret update choice"
                        )

                    if update_choice == "Update with new value":
                        new_value = interactive_secret_input(secret_name, secret_key)
                    else:
                        new_value = current_value
                else:
                    console.print(
                        f"[yellow]Secret '{secret_name}' key '{secret_key}' does not exist[/yellow]"
                    )
                    new_value = interactive_secret_input(secret_name, secret_key)
            else:
                raise RuntimeError(
                    f"No value provided for secret '{secret_name}' key '{secret_key}'. Provide values file or use interactive mode."
                )

            # Must be a string because Kubernetes secret values are always strings.
            new_value = str(new_value)
            new_secret_data[secret_key] = new_value

            # Check if value changed
            if current_value != new_value:
                secret_needs_update = True
            else:
                changes["noop"].append(
                    f"Secret '{secret_name}' key '{secret_key}' is up to date"
                )

        # Determine action needed
        if secret_name not in cluster_secret_names:
            changes["create"].append(
                f"Create secret '{secret_name}' with keys: {list(required_keys.keys())}"
            )
            create_secret_with_data(secret_name, new_secret_data, namespace, cluster)
        elif secret_needs_update:
            changes["update"].append(f"Update secret '{secret_name}' (values changed)")
            update_secret_with_data(secret_name, new_secret_data, namespace, cluster)

    # Handle orphaned secrets (in cluster but not in manifest)
    orphaned_secrets = cluster_secret_names - set(required_secrets.keys())
    if orphaned_secrets:
        console.print(
            f"\n[yellow]Warning: Found {len(orphaned_secrets)} secrets in cluster not defined in manifest:[/yellow]"
        )
        for secret in orphaned_secrets:
            console.print(f"  - {secret}")
+
+
+def create_dockerconfigjson_string(
+ registry: str, username: str, password: str, email: str | None = None
+) -> str:
+ """Create raw dockerconfigjson string data for use with Kubernetes string_data field"""
+ # Create the auth field (base64 encoded username:password)
+ auth_string = f"{username}:{password}"
+ auth_b64 = base64.b64encode(auth_string.encode("utf-8")).decode("utf-8")
+
+ # Build the auth entry
+ auth_entry = {"username": username, "password": password, "auth": auth_b64}
+
+ # Only include email if provided
+ if email:
+ auth_entry["email"] = email
+
+ # Create the full dockerconfig structure
+ docker_config = {"auths": {registry: auth_entry}}
+
+ # Return raw JSON string (Kubernetes will handle base64 encoding when using string_data)
+ return json.dumps(docker_config)
+
+
def parse_dockerconfigjson_data(input_data: str) -> dict[str, dict[str, str]]:
    """Parse a raw .dockerconfigjson string into per-registry credentials.

    Returns {registry: {"username", "password", "email"}}. Any parse failure
    (not JSON, unexpected shape, etc.) yields an empty dict rather than
    raising, so callers can treat bad data as "no existing credentials".
    """
    try:
        # The payload is plain JSON here (string_data form), so no base64
        # decode of the document itself is needed.
        auths = json.loads(input_data).get("auths", {})

        result: dict[str, dict[str, str]] = {}
        for registry, entry in auths.items():
            user, pwd = "", ""

            # Prefer the combined base64 "auth" field when it decodes cleanly
            # to a "username:password" pair.
            encoded = entry.get("auth")
            if encoded is not None:
                try:
                    decoded = base64.b64decode(encoded).decode("utf-8")
                except Exception:
                    decoded = ""
                if ":" in decoded:
                    user, pwd = decoded.split(":", 1)

            # Fall back to the explicit fields for anything still missing.
            user = user or entry.get("username", "")
            pwd = pwd or entry.get("password", "")

            result[registry] = {
                "username": user,
                "password": pwd,
                "email": entry.get("email", ""),
            }

        return result
    except Exception:
        return {}  # If parsing fails, assume empty/invalid
+
+
def credentials_changed(
    current_auths: dict[str, dict[str, str]],
    new_registry: str,
    new_username: str,
    new_password: str,
    new_email: str = "",
) -> bool:
    """Return True when the new credentials differ from the stored ones.

    A registry missing from *current_auths* always counts as changed; an
    empty/None email is treated the same as an absent email.
    """
    if new_registry not in current_auths:
        # Unknown registry: anything we would write is a change.
        return True

    existing = current_auths[new_registry]
    unchanged = (
        existing.get("username", "") == new_username
        and existing.get("password", "") == new_password
        and existing.get("email", "") == (new_email or "")
    )
    return not unchanged
+
+
def interactive_image_pull_secret_input(secret_name: str) -> dict[str, str]:
    """Prompt the user for image pull secret values.

    Args:
        secret_name: Name of the secret being configured (display only).

    Returns:
        A dict with keys "registry", "username", "password", and "email"
        ("email" is an empty string when the user opts out).
    """
    console.print(
        f"\n[bold]Configure image pull secret '[cyan]{secret_name}[/cyan]':[/bold]"
    )

    registry = questionary.text(
        "Registry URL (e.g., docker.io, gcr.io, your-registry.com):",
        default="docker.io",
    ).ask()
    registry = handle_questionary_cancellation(registry, "registry input")

    username = questionary.text("Username:").ask()
    username = handle_questionary_cancellation(username, "username input")

    password = questionary.password("Password (input will be hidden):").ask()
    password = handle_questionary_cancellation(password, "password input")

    email_choice = questionary.confirm(
        "Do you want to include an email address? (optional)"
    ).ask()
    email_choice = handle_questionary_cancellation(email_choice, "email choice")
    email = ""
    if email_choice:
        # `or ""` already maps a cancelled prompt (None) to an empty string,
        # so no separate None check is needed afterwards.
        email = questionary.text("Email address:").ask() or ""

    return {
        "registry": registry,
        "username": username,
        "password": password,
        "email": email,
    }
+
+
def sync_image_pull_secrets(
    manifest_obj: AgentManifest,
    found_dockerconfigjson_secrets: list[dict],
    values_data: dict[str, Any],
    cluster: str,
    namespace: str,
    interactive: bool,
    changes: dict[str, list[str]],
) -> None:
    """Sync image pull secrets between manifest, cluster, and values file.

    Args:
        manifest_obj: Parsed agent manifest; `deployment.imagePullSecrets`
            defines which pull secrets are required.
        found_dockerconfigjson_secrets: Metadata dicts for dockerconfigjson
            secrets already in the cluster.
        values_data: Per-secret registry credentials loaded from a values
            file ({secret_name: {registry, username, password, email}}).
        cluster: Kubeconfig context of the target cluster.
        namespace: Kubernetes namespace to sync into.
        interactive: When True, prompt for credentials not in values_data.
        changes: Mutated in place; change descriptions are appended under
            "create", "update", and "noop".

    Raises:
        RuntimeError: On incomplete/missing configuration when interactive
            mode is disabled.
    """
    console.print(
        f"[bold]Syncing image pull secrets to cluster: {cluster} namespace: {namespace}[/bold]"
    )

    # Get the secrets of type KUBERNETES_SECRET_TYPE_DOCKERCONFIGJSON
    cluster_dockerconfigjson_secret_names = {
        secret["name"] for secret in found_dockerconfigjson_secrets
    }

    # Get the secrets from the manifest
    deployment_config: DeploymentConfig = manifest_obj.deployment
    manifest_image_pull_secrets: list[ImagePullSecretConfig] = (
        deployment_config.imagePullSecrets or []
    )

    if not manifest_image_pull_secrets:
        logger.info("No image pull secrets found in manifest")
        return

    # Get image pull secrets from values data
    image_pull_values = values_data

    # Process each required image pull secret
    for pull_secret in manifest_image_pull_secrets:
        secret_name = pull_secret.name
        current_secret_data = get_secret_data(secret_name, namespace, cluster)

        # Get new values
        new_registry = ""
        new_username = ""
        new_password = ""
        new_email = ""

        if secret_name in image_pull_values:
            # Get values from values file
            secret_config = image_pull_values[secret_name]
            new_registry = secret_config.get("registry", "")
            new_username = secret_config.get("username", "")
            new_password = secret_config.get("password", "")
            new_email = secret_config.get("email", "")

            if not new_registry or not new_username or not new_password:
                raise RuntimeError(
                    f"Incomplete image pull secret configuration for '{secret_name}'. "
                    f"Required: registry, username, password. Optional: email"
                )
        elif interactive:
            # Get values interactively
            if secret_name in cluster_dockerconfigjson_secret_names:
                console.print(
                    f"[blue]Image pull secret '{secret_name}' already exists[/blue]"
                )
                update_choice = questionary.select(
                    "What would you like to do?",
                    choices=["Keep current credentials", "Update with new credentials"],
                ).ask()
                update_choice = handle_questionary_cancellation(
                    update_choice, "image pull secret update choice"
                )

                if update_choice == "Keep current credentials":
                    continue  # Skip this secret

            console.print(
                f"[yellow]Image pull secret '{secret_name}' needs configuration[/yellow]"
            )
            creds = interactive_image_pull_secret_input(secret_name)
            new_registry = creds["registry"]
            new_username = creds["username"]
            new_password = creds["password"]
            new_email = creds["email"]
        else:
            raise RuntimeError(
                f"No configuration provided for image pull secret '{secret_name}'. "
                f"Provide values file or use interactive mode."
            )

        # Check if update is needed
        secret_needs_update = False
        action = ""

        if secret_name not in cluster_dockerconfigjson_secret_names:
            # Secret doesn't exist, needs creation
            secret_needs_update = True
            action = "create"
        else:
            # Secret exists, check if values changed.
            # NOTE: default is "" (not {}) so the parser always receives the
            # string it is typed for; an empty string parses to no auths.
            current_dockerconfig = current_secret_data.get(".dockerconfigjson", "")
            current_auths = parse_dockerconfigjson_data(current_dockerconfig)
            if credentials_changed(
                current_auths, new_registry, new_username, new_password, new_email
            ):
                secret_needs_update = True
                action = "update"
            else:
                changes["noop"].append(
                    f"Image pull secret '{secret_name}' is up to date"
                )

        # Only perform action if update is needed
        if secret_needs_update:
            dockerconfig_string = create_dockerconfigjson_string(
                new_registry, new_username, new_password, new_email
            )
            secret_data = {".dockerconfigjson": dockerconfig_string}

            if action == "create":
                changes[action].append(
                    f"Create image pull secret '{secret_name}' for registry '{new_registry}'"
                )
                create_image_pull_secret_with_data(
                    secret_name, secret_data, namespace, cluster
                )
            elif action == "update":
                changes[action].append(
                    f"Update image pull secret '{secret_name}' (credentials changed)"
                )
                update_image_pull_secret_with_data(
                    secret_name, secret_data, namespace, cluster
                )
+
+
def print_changes_summary(change_type: str, changes: dict[str, list[str]]) -> None:
    """Print a sync summary for *change_type* from the collected changes.

    Note: the "noop" key is removed from *changes* as a side effect so the
    final "all in sync" check only considers real mutations.
    """
    console.print(f"\n[bold]Sync Summary for {change_type}:[/bold]")

    # (key, heading, bullet marker) — printed in this fixed order.
    sections = (
        ("create", "[green]Created:[/green]", "✓"),
        ("update", "[yellow]Updated:[/yellow]", "⚠"),
        ("noop", "[yellow]No changes:[/yellow]", "✓"),
    )
    for key, heading, marker in sections:
        entries = changes[key]
        if entries:
            console.print(heading)
            for entry in entries:
                console.print(f"  {marker} {entry}")

    del changes["noop"]

    if not any(changes.values()):
        console.print(
            f"[green]✓ All secrets are already in sync for {change_type}[/green]"
        )

    console.print("")
+
+
def sync_secrets(
    manifest_obj: AgentManifest,
    cluster: str,
    namespace: str,
    interactive: bool,
    values_path: str | None,
) -> None:
    """Sync secrets between manifest, cluster, and values file."""
    logger.info(f"Syncing secrets to cluster: {cluster} namespace: {namespace}")

    # Load values from file if provided
    values_data = {}
    if values_path:
        try:
            # TODO: Convert this to a pydantic model to validate the values file
            values_data = load_values_file(values_path)
            console.print(f"[green]Loaded values from {values_path}[/green]")
        except Exception as e:
            console.print(f"[red]Error loading values file: {e}[/red]")
            raise

    # Get the secrets from the cluster using the specified namespace and cluster context
    cluster_secrets_by_type = get_kubernetes_secrets_by_type(
        namespace=namespace, context=cluster
    )

    # Each secret kind gets its own change tracker and summary, in order:
    # user-defined (Opaque) secrets first, then image pull secrets.
    sync_plan = (
        (
            "User Defined Secrets",
            KUBERNETES_SECRET_TYPE_OPAQUE,
            sync_user_defined_secrets,
        ),
        (
            "Image Pull Secrets",
            KUBERNETES_SECRET_TYPE_DOCKERCONFIGJSON,
            sync_image_pull_secrets,
        ),
    )
    for label, secret_type, sync_fn in sync_plan:
        changes: dict[str, list[str]] = {"create": [], "update": [], "noop": []}
        sync_fn(
            manifest_obj,
            cluster_secrets_by_type[secret_type],
            values_data.get(KUBERNETES_SECRET_TO_MANIFEST_KEY[secret_type], {}),
            cluster,
            namespace,
            interactive,
            changes,
        )
        print_changes_summary(label, changes)

    console.print(
        f"\n[green]Secret sync completed for cluster '{cluster}' namespace '{namespace}'[/green]"
    )
diff --git a/src/agentex/lib/cli/templates/default/.dockerignore.j2 b/src/agentex/lib/cli/templates/default/.dockerignore.j2
new file mode 100644
index 000000000..15b178c77
--- /dev/null
+++ b/src/agentex/lib/cli/templates/default/.dockerignore.j2
@@ -0,0 +1,46 @@
+# Python
+__pycache__/
+*.py[cod]
+*$py.class
+*.so
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# Environments
+.env**
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# IDE
+.idea/
+.vscode/
+*.swp
+*.swo
+
+# Git
+.git
+.gitignore
+
+# Misc
+.DS_Store
+
+# CodeArtifact configuration
+.codeartifact-pip-conf
diff --git a/src/agentex/lib/cli/templates/default/Dockerfile-uv.j2 b/src/agentex/lib/cli/templates/default/Dockerfile-uv.j2
new file mode 100644
index 000000000..e17d369b5
--- /dev/null
+++ b/src/agentex/lib/cli/templates/default/Dockerfile-uv.j2
@@ -0,0 +1,45 @@
+# syntax=docker/dockerfile:1.3
+FROM python:3.12-slim
+COPY --from=ghcr.io/astral-sh/uv:0.6.4 /uv /uvx /bin/
+
+# Install system dependencies
+RUN apt-get update && apt-get install -y \
+ htop \
+ vim \
+ curl \
+ tar \
+ python3-dev \
+ postgresql-client \
+ build-essential \
+ libpq-dev \
+ gcc \
+ cmake \
+ netcat-openbsd \
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
+
+RUN uv pip install --system --upgrade pip setuptools wheel
+
+ENV UV_HTTP_TIMEOUT=1000
+
+# Copy just the pyproject.toml file to optimize caching
+COPY {{ project_path_from_build_root }}/pyproject.toml /app/{{ project_path_from_build_root }}/pyproject.toml
+
+WORKDIR /app/{{ project_path_from_build_root }}
+
+# Install the required Python packages using uv
+RUN --mount=type=secret,id=codeartifact-pip-conf,target=/etc/pip.conf \
+ export UV_INDEX_URL=$(grep -E "^index-url" /etc/pip.conf | cut -d'=' -f2- | xargs) && \
+ export UV_EXTRA_INDEX_URL=$(grep -E "^extra-index-url" /etc/pip.conf | cut -d'=' -f2- | xargs || echo "") && \
+ uv pip install --system .
+
+# Copy the project code
+COPY {{ project_path_from_build_root }}/project /app/{{ project_path_from_build_root }}/project
+
+WORKDIR /app/{{ project_path_from_build_root }}/project
+
+# Set environment variables
+ENV PYTHONPATH=/app
+
+# Run the agent using uvicorn
+CMD ["uvicorn", "acp:acp", "--host", "0.0.0.0", "--port", "8000"]
\ No newline at end of file
diff --git a/src/agentex/lib/cli/templates/default/Dockerfile.j2 b/src/agentex/lib/cli/templates/default/Dockerfile.j2
new file mode 100644
index 000000000..d8dcad439
--- /dev/null
+++ b/src/agentex/lib/cli/templates/default/Dockerfile.j2
@@ -0,0 +1,45 @@
+# syntax=docker/dockerfile:1.3
+FROM python:3.12-slim
+COPY --from=ghcr.io/astral-sh/uv:0.6.4 /uv /uvx /bin/
+
+# Install system dependencies
+RUN apt-get update && apt-get install -y \
+ htop \
+ vim \
+ curl \
+ tar \
+ python3-dev \
+ postgresql-client \
+ build-essential \
+ libpq-dev \
+ gcc \
+ cmake \
+ netcat-openbsd \
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
+
+RUN uv pip install --system --upgrade pip setuptools wheel
+
+ENV UV_HTTP_TIMEOUT=1000
+
+# Copy just the requirements file to optimize caching
+COPY {{ project_path_from_build_root }}/requirements.txt /app/{{ project_path_from_build_root }}/requirements.txt
+
+WORKDIR /app/{{ project_path_from_build_root }}
+
+# Install the required Python packages
+RUN --mount=type=secret,id=codeartifact-pip-conf,target=/etc/pip.conf \
+ export UV_INDEX_URL=$(grep -E "^index-url" /etc/pip.conf | cut -d'=' -f2- | xargs) && \
+ export UV_EXTRA_INDEX_URL=$(grep -E "^extra-index-url" /etc/pip.conf | cut -d'=' -f2- | xargs || echo "") && \
+ uv pip install --system -r requirements.txt
+
+# Copy the project code
+COPY {{ project_path_from_build_root }}/project /app/{{ project_path_from_build_root }}/project
+
+WORKDIR /app/{{ project_path_from_build_root }}/project
+
+# Set environment variables
+ENV PYTHONPATH=/app
+
+# Run the agent using uvicorn
+CMD ["uvicorn", "acp:acp", "--host", "0.0.0.0", "--port", "8000"]
\ No newline at end of file
diff --git a/src/agentex/lib/cli/templates/default/README.md.j2 b/src/agentex/lib/cli/templates/default/README.md.j2
new file mode 100644
index 000000000..5ce149aad
--- /dev/null
+++ b/src/agentex/lib/cli/templates/default/README.md.j2
@@ -0,0 +1,199 @@
+# {{ agent_name }} - AgentEx Starter Template
+
+This is a generic starter template for building agents with the AgentEx framework. It provides a basic implementation of the Agent 2 Client Protocol (ACP) to help you get started quickly.
+
+## What You'll Learn
+
+- **Tasks**: A task is a grouping mechanism for related messages. Think of it as a conversation thread or a session.
+- **Messages**: Messages are communication objects within a task. They can contain text, data, or instructions.
+- **ACP Events**: The agent responds to three main events:
+  - `task_create`: When a new task is created
+  - `task_event_send`: When an event or message is sent within a task
+  - `task_cancel`: When a task is canceled
+  These correspond to the handlers registered in `project/acp.py`.
+
+## Running the Agent
+
+1. Run the agent locally:
+```bash
+agentex agents run --manifest manifest.yaml
+```
+
+The agent will start on port 8000 and print messages whenever it receives any of the ACP events.
+
+## What's Inside
+
+This template:
+- Sets up a basic ACP server
+- Handles each of the required ACP events with simple print statements
+- Provides a foundation for building more complex agents
+
+## Next Steps
+
+For more advanced agent development, check out the AgentEx tutorials:
+
+- **Tutorials 00-08**: Learn about building synchronous agents with ACP
+- **Tutorials 09-10**: Learn how to use Temporal to power asynchronous agents
+ - Tutorial 09: Basic Temporal workflow setup
+ - Tutorial 10: Advanced Temporal patterns and best practices
+
+These tutorials will help you understand:
+- How to handle long-running tasks
+- Implementing state machines
+- Managing complex workflows
+- Best practices for async agent development
+
+## The Manifest File
+
+The `manifest.yaml` file is your agent's configuration file. It defines:
+- How your agent should be built and packaged
+- What files are included in your agent's Docker image
+- Your agent's name and description
+- Local development settings (like the port your agent runs on)
+
+This file is essential for both local development and deployment of your agent.
+
+## Project Structure
+
+```
+{{ project_name }}/
+├── project/ # Your agent's code
+│ ├── __init__.py
+│ └── acp.py # ACP server and event handlers
+├── Dockerfile # Container definition
+├── manifest.yaml # Deployment config
+{% if use_uv %}
+└── pyproject.toml # Dependencies (uv)
+{% else %}
+└── requirements.txt # Dependencies (pip)
+{% endif %}
+```
+
+## Development
+
+### 1. Customize Event Handlers
+- Modify the handlers in `acp.py` to implement your agent's logic
+- Add your own tools and capabilities
+- Implement custom state management
+
+### 2. Manage Dependencies
+
+{% if use_uv %}
+You chose **uv** for package management. Here's how to work with dependencies:
+
+```bash
+# Add new dependencies
+agentex uv add requests openai anthropic
+
+# Install/sync dependencies
+agentex uv sync
+
+# Run commands with uv
+uv run agentex agents run --manifest manifest.yaml
+```
+
+**Benefits of uv:**
+- Faster dependency resolution and installation
+- Better dependency isolation
+- Integrated CodeArtifact support
+- Modern Python packaging standards
+
+{% else %}
+You chose **pip** for package management. Here's how to work with dependencies:
+
+```bash
+# Edit requirements.txt manually to add dependencies
+echo "requests" >> requirements.txt
+echo "openai" >> requirements.txt
+
+# Install dependencies
+pip install -r requirements.txt
+```
+
+**Benefits of pip:**
+- Familiar workflow for most Python developers
+- Simple requirements.txt management
+- Wide compatibility
+{% endif %}
+
+### 3. Configure Credentials
+Options:
+1. Add any required credentials to your manifest.yaml via the `env` section
+2. Export them in your shell: `export OPENAI_API_KEY=...`
+3. For local development, create a `.env.local` file in the project directory
+
+```python
+import os
+from dotenv import load_dotenv
+
+if os.environ.get("ENVIRONMENT") == "development":
+ load_dotenv()
+```
+
+## Local Development
+
+
+### 1. Start the Agentex Backend
+```bash
+# Navigate to the backend directory
+cd agentex
+
+# Start all services using Docker Compose
+make dev
+
+# Optional: In a separate terminal, use lazydocker for a better UI (everything should say "healthy")
+lzd
+```
+
+### 2. Setup Your Agent's requirements/pyproject.toml
+```bash
+agentex uv sync [--group editable-apy]
+source .venv/bin/activate
+
+# OR
+conda create -n {{ project_name }} python=3.12
+conda activate {{ project_name }}
+pip install -r requirements.txt
+```
+### 3. Run Your Agent
+```bash
+# From this directory
+export ENVIRONMENT=development && [uv run] agentex agents run --manifest manifest.yaml
+```
+
+### 4. Interact with Your Agent
+
+Option 0: CLI (deprecated - to be replaced once a new CLI is implemented - please use the web UI for now!)
+```bash
+# Submit a task via CLI
+agentex tasks submit --agent {{ agent_name }} --task "Your task here"
+```
+
+Option 1: Web UI
+```bash
+# Start the local web interface
+cd agentex-web
+make dev
+
+# Then open http://localhost:3000 in your browser to chat with your agent
+```
+
+## Development Tips
+
+### Environment Variables
+- Set environment variables in project/.env for any required credentials
+- Or configure them in the manifest.yaml under the `env` section
+- The `.env` file is automatically loaded in development mode
+
+### To build the agent Docker image locally (normally not necessary):
+
+1. First, set up CodeArtifact authentication:
+```bash
+../../setup-build-codeartifact.sh
+```
+
+2. Build the agent image:
+```bash
+agentex agents build --manifest manifest.yaml --secret 'id=codeartifact-pip-conf,src=.codeartifact-pip-conf'
+```
+
diff --git a/src/agentex/lib/cli/templates/default/deploy/example.yaml.j2 b/src/agentex/lib/cli/templates/default/deploy/example.yaml.j2
new file mode 100644
index 000000000..d4874d5ba
--- /dev/null
+++ b/src/agentex/lib/cli/templates/default/deploy/example.yaml.j2
@@ -0,0 +1,55 @@
+# Example Override Configuration
+# =============================
+# This file shows how to override deployment settings from the global defaults in manifest.yaml.
+# You can create multiple override files for different environments (e.g., staging.yaml, prod.yaml, dev.yaml)
+# Only specify values that differ from the global defaults in manifest.yaml
+
+# Override image tag
+# image:
+# tag: "v1.2.3"
+
+# Override replica count
+# replicaCount: 2
+
+# Environment-specific environment variables
+# env:
+# - name: LOG_LEVEL
+# value: "DEBUG"
+# - name: ENVIRONMENT
+# value: "staging"
+
+# Override resource requirements
+# resources:
+# requests:
+# cpu: "250m"
+# memory: "512Mi"
+# limits:
+# cpu: "500m"
+# memory: "1Gi"
+
+# Advanced: Additional helm chart value overrides
+# Use this for any helm chart values not covered by the simple options above
+# additional_overrides:
+# autoscaling:
+# enabled: true
+# minReplicas: 2
+# maxReplicas: 10
+# targetCPUUtilizationPercentage: 70
+#
+# service:
+# type: LoadBalancer
+#
+# ingress:
+# enabled: true
+# annotations:
+# kubernetes.io/ingress.class: "nginx"
+# hosts:
+# - host: {{ agent_name }}.example.com
+# paths:
+# - path: /
+# pathType: Prefix
+
+# To use this configuration:
+# 1. Copy this file to a new file (e.g., staging.yaml, prod.yaml)
+# 2. Uncomment and modify the values you want to override
+# 3. Deploy with: agentex agents deploy --cluster your-cluster --namespace your-namespace --override-file staging.yaml
\ No newline at end of file
diff --git a/src/agentex/lib/cli/templates/default/manifest.yaml.j2 b/src/agentex/lib/cli/templates/default/manifest.yaml.j2
new file mode 100644
index 000000000..6f7df0414
--- /dev/null
+++ b/src/agentex/lib/cli/templates/default/manifest.yaml.j2
@@ -0,0 +1,116 @@
+# Agent Manifest Configuration
+# ---------------------------
+# This file defines how your agent should be built and deployed.
+
+# Build Configuration
+# ------------------
+# The build config defines what gets packaged into your agent's Docker image.
+# This same configuration is used whether building locally or remotely.
+#
+# When building:
+# 1. All files from include_paths are collected into a build context
+# 2. The context is filtered by dockerignore rules
+# 3. The Dockerfile uses this context to build your agent's image
+# 4. The image is pushed to a registry and used to run your agent
+build:
+ context:
+ # Root directory for the build context
+ root: ../ # Keep this as the default root
+
+ # Paths to include in the Docker build context
+ # Must include:
+ # - Your agent's directory (your custom agent code)
+ # These paths are collected and sent to the Docker daemon for building
+ include_paths:
+ - {{ project_path_from_build_root }}
+
+ # Path to your agent's Dockerfile
+ # This defines how your agent's image is built from the context
+ # Relative to the root directory
+ dockerfile: {{ project_path_from_build_root }}/Dockerfile
+
+ # Path to your agent's .dockerignore
+ # Filters unnecessary files from the build context
+ # Helps keep build context small and builds fast
+ dockerignore: {{ project_path_from_build_root }}/.dockerignore
+
+
+# Local Development Configuration
+# -----------------------------
+# Only used when running the agent locally
+local_development:
+ agent:
+ port: 8000 # Port where your local ACP server is running
+ host_address: host.docker.internal # Host address for Docker networking (host.docker.internal for Docker, localhost for direct)
+
+ # File paths for local development (relative to this manifest.yaml)
+ paths:
+ # Path to ACP server file
+ # Examples:
+ # project/acp.py (standard)
+ # src/server.py (custom structure)
+ # ../shared/acp.py (shared across projects)
+ # /absolute/path/acp.py (absolute path)
+ acp: project/acp.py
+
+
+# Agent Configuration
+# -----------------
+agent:
+ acp_type: agentic
+
+ # Unique name for your agent
+ # Used for task routing and monitoring
+ name: {{ agent_name }}
+
+ # Description of what your agent does
+ # Helps with documentation and discovery
+ description: {{ description }}
+
+ # Temporal workflow configuration
+ # Set enabled: true to use Temporal workflows for long-running tasks
+ temporal:
+ enabled: false
+
+ # Optional: Credentials mapping
+ # Maps Kubernetes secrets to environment variables
+ # Common credentials include:
+ # credentials:
+ # - env_var_name: OPENAI_API_KEY
+ # secret_name: openai-api-key
+ # secret_key: api-key
+
+ # Optional: Set Environment variables for running your agent locally as well
+ # as for deployment later on
+ # env:
+ # OPENAI_API_KEY: ""
+ # OPENAI_BASE_URL: ""
+ # OPENAI_ORG_ID: ""
+
+# Deployment Configuration
+# -----------------------
+# Configuration for deploying your agent to Kubernetes clusters
+deployment:
+ # Container image configuration
+ image:
+ repository: "" # Update with your container registry
+ tag: "latest" # Default tag, should be versioned in production
+
+ # Global deployment settings that apply to all clusters
+ # These can be overridden in cluster-specific files (deploy/*.yaml)
+ global:
+ agent:
+ name: "{{ agent_name }}"
+ description: "{{ description }}"
+
+ # Default replica count
+ replicaCount: 1
+
+ # Default resource requirements
+ resources:
+ requests:
+ cpu: "500m"
+ memory: "1Gi"
+ limits:
+ cpu: "1000m"
+ memory: "2Gi"
\ No newline at end of file
diff --git a/src/agentex/lib/cli/templates/default/project/acp.py.j2 b/src/agentex/lib/cli/templates/default/project/acp.py.j2
new file mode 100644
index 000000000..e68dbea02
--- /dev/null
+++ b/src/agentex/lib/cli/templates/default/project/acp.py.j2
@@ -0,0 +1,29 @@
+from agentex.lib.sdk.fastacp.fastacp import FastACP
+from agentex.lib.types.fastacp import AgenticACPConfig
+from agentex.lib.types.acp import SendEventParams, CancelTaskParams, CreateTaskParams
+
+
+# Create an ACP server
+acp = FastACP.create(
+ acp_type="agentic",
+ config=AgenticACPConfig(type="base")
+)
+
+
+@acp.on_task_event_send
+async def handle_task_event_send(params: SendEventParams):
+ # For this tutorial, we print the parameters sent to the handler
+ # so you can see where and how messages within a task are handled
+ print(f"Hello world! I just received this message: {params}")
+
+@acp.on_task_cancel
+async def handle_task_canceled(params: CancelTaskParams):
+ # For this tutorial, we print the parameters sent to the handler
+ # so you can see where and how task cancellation is handled
+ print(f"Hello world! Task canceled: {params.task.id}")
+
+@acp.on_task_create
+async def handle_task_create(params: CreateTaskParams):
+ # For this tutorial, we print the parameters sent to the handler
+ # so you can see where and how task creation is handled
+ print(f"Hello world! Task created: {params.task.id}")
diff --git a/src/agentex/lib/cli/templates/default/pyproject.toml.j2 b/src/agentex/lib/cli/templates/default/pyproject.toml.j2
new file mode 100644
index 000000000..3230dbecf
--- /dev/null
+++ b/src/agentex/lib/cli/templates/default/pyproject.toml.j2
@@ -0,0 +1,41 @@
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[project]
+name = "{{ project_name }}"
+version = "0.1.0"
+description = "{{ description }}"
+readme = "README.md"
+requires-python = ">=3.12"
+dependencies = [
+ "agentex-sdk",
+ "scale-gp",
+]
+
+[project.optional-dependencies]
+dev = [
+ "pytest",
+ "black",
+ "isort",
+ "flake8",
+]
+
+[dependency-groups]
+editable-apy = [
+ "agentex-sdk @ file://{{ agentex_py_path }}",
+]
+
+[tool.hatch.build.targets.wheel]
+packages = ["project"]
+
+[tool.black]
+line-length = 88
+target-version = ['py312']
+
+[tool.isort]
+profile = "black"
+line_length = 88
+
+[[tool.uv.index]]
+url = "https://scale-307185671274.d.codeartifact.us-west-2.amazonaws.com/pypi/scale-pypi/simple/"
\ No newline at end of file
diff --git a/src/agentex/lib/cli/templates/default/requirements.txt.j2 b/src/agentex/lib/cli/templates/default/requirements.txt.j2
new file mode 100644
index 000000000..0b8ae19b3
--- /dev/null
+++ b/src/agentex/lib/cli/templates/default/requirements.txt.j2
@@ -0,0 +1,5 @@
+# AgentEx Python SDK (installed from the configured package index)
+agentex-sdk
+
+# Scale GenAI Platform Python SDK
+scale-gp
diff --git "a/src/agentex/lib/cli/templates/deploy/Screenshot 2025-03-19 at 10.36.57\342\200\257AM.png" "b/src/agentex/lib/cli/templates/deploy/Screenshot 2025-03-19 at 10.36.57\342\200\257AM.png"
new file mode 100644
index 000000000..ef18c6e81
Binary files /dev/null and "b/src/agentex/lib/cli/templates/deploy/Screenshot 2025-03-19 at 10.36.57\342\200\257AM.png" differ
diff --git a/src/agentex/lib/cli/templates/deploy/example.yaml.j2 b/src/agentex/lib/cli/templates/deploy/example.yaml.j2
new file mode 100644
index 000000000..d4874d5ba
--- /dev/null
+++ b/src/agentex/lib/cli/templates/deploy/example.yaml.j2
@@ -0,0 +1,55 @@
+# Example Override Configuration
+# =============================
+# This file shows how to override deployment settings from the global defaults in manifest.yaml.
+# You can create multiple override files for different environments (e.g., staging.yaml, prod.yaml, dev.yaml)
+# Only specify values that differ from the global defaults in manifest.yaml
+
+# Override image tag
+# image:
+# tag: "v1.2.3"
+
+# Override replica count
+# replicaCount: 2
+
+# Environment-specific environment variables
+# env:
+# - name: LOG_LEVEL
+# value: "DEBUG"
+# - name: ENVIRONMENT
+# value: "staging"
+
+# Override resource requirements
+# resources:
+# requests:
+# cpu: "250m"
+# memory: "512Mi"
+# limits:
+# cpu: "500m"
+# memory: "1Gi"
+
+# Advanced: Additional helm chart value overrides
+# Use this for any helm chart values not covered by the simple options above
+# additional_overrides:
+# autoscaling:
+# enabled: true
+# minReplicas: 2
+# maxReplicas: 10
+# targetCPUUtilizationPercentage: 70
+#
+# service:
+# type: LoadBalancer
+#
+# ingress:
+# enabled: true
+# annotations:
+# kubernetes.io/ingress.class: "nginx"
+# hosts:
+# - host: {{ agent_name }}.example.com
+# paths:
+# - path: /
+# pathType: Prefix
+
+# To use this configuration:
+# 1. Copy this file to a new file (e.g., staging.yaml, prod.yaml)
+# 2. Uncomment and modify the values you want to override
+# 3. Deploy with: agentex agents deploy --cluster your-cluster --namespace your-namespace --override-file staging.yaml
\ No newline at end of file
diff --git a/src/agentex/lib/cli/templates/sync/.dockerignore.j2 b/src/agentex/lib/cli/templates/sync/.dockerignore.j2
new file mode 100644
index 000000000..15b178c77
--- /dev/null
+++ b/src/agentex/lib/cli/templates/sync/.dockerignore.j2
@@ -0,0 +1,46 @@
+# Python
+__pycache__/
+*.py[cod]
+*$py.class
+*.so
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# Environments
+.env**
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# IDE
+.idea/
+.vscode/
+*.swp
+*.swo
+
+# Git
+.git
+.gitignore
+
+# Misc
+.DS_Store
+
+# CodeArtifact configuration
+.codeartifact-pip-conf
diff --git a/src/agentex/lib/cli/templates/sync/Dockerfile-uv.j2 b/src/agentex/lib/cli/templates/sync/Dockerfile-uv.j2
new file mode 100644
index 000000000..e17d369b5
--- /dev/null
+++ b/src/agentex/lib/cli/templates/sync/Dockerfile-uv.j2
@@ -0,0 +1,45 @@
+# syntax=docker/dockerfile:1.3
+FROM python:3.12-slim
+COPY --from=ghcr.io/astral-sh/uv:0.6.4 /uv /uvx /bin/
+
+# Install system dependencies
+RUN apt-get update && apt-get install -y \
+ htop \
+ vim \
+ curl \
+ tar \
+ python3-dev \
+ postgresql-client \
+ build-essential \
+ libpq-dev \
+ gcc \
+ cmake \
+ netcat-openbsd \
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
+
+RUN uv pip install --system --upgrade pip setuptools wheel
+
+ENV UV_HTTP_TIMEOUT=1000
+
+# Copy just the pyproject.toml file to optimize caching
+COPY {{ project_path_from_build_root }}/pyproject.toml /app/{{ project_path_from_build_root }}/pyproject.toml
+
+WORKDIR /app/{{ project_path_from_build_root }}
+
+# Install the required Python packages using uv
+RUN --mount=type=secret,id=codeartifact-pip-conf,target=/etc/pip.conf \
+ export UV_INDEX_URL=$(grep -E "^index-url" /etc/pip.conf | cut -d'=' -f2- | xargs) && \
+ export UV_EXTRA_INDEX_URL=$(grep -E "^extra-index-url" /etc/pip.conf | cut -d'=' -f2- | xargs || echo "") && \
+ uv pip install --system .
+
+# Copy the project code
+COPY {{ project_path_from_build_root }}/project /app/{{ project_path_from_build_root }}/project
+
+WORKDIR /app/{{ project_path_from_build_root }}/project
+
+# Set environment variables
+ENV PYTHONPATH=/app
+
+# Run the agent using uvicorn
+CMD ["uvicorn", "acp:acp", "--host", "0.0.0.0", "--port", "8000"]
\ No newline at end of file
diff --git a/src/agentex/lib/cli/templates/sync/Dockerfile.j2 b/src/agentex/lib/cli/templates/sync/Dockerfile.j2
new file mode 100644
index 000000000..d8dcad439
--- /dev/null
+++ b/src/agentex/lib/cli/templates/sync/Dockerfile.j2
@@ -0,0 +1,45 @@
+# syntax=docker/dockerfile:1.3
+FROM python:3.12-slim
+COPY --from=ghcr.io/astral-sh/uv:0.6.4 /uv /uvx /bin/
+
+# Install system dependencies
+RUN apt-get update && apt-get install -y \
+ htop \
+ vim \
+ curl \
+ tar \
+ python3-dev \
+ postgresql-client \
+ build-essential \
+ libpq-dev \
+ gcc \
+ cmake \
+ netcat-openbsd \
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
+
+RUN uv pip install --system --upgrade pip setuptools wheel
+
+ENV UV_HTTP_TIMEOUT=1000
+
+# Copy just the requirements file to optimize caching
+COPY {{ project_path_from_build_root }}/requirements.txt /app/{{ project_path_from_build_root }}/requirements.txt
+
+WORKDIR /app/{{ project_path_from_build_root }}
+
+# Install the required Python packages
+RUN --mount=type=secret,id=codeartifact-pip-conf,target=/etc/pip.conf \
+ export UV_INDEX_URL=$(grep -E "^index-url" /etc/pip.conf | cut -d'=' -f2- | xargs) && \
+ export UV_EXTRA_INDEX_URL=$(grep -E "^extra-index-url" /etc/pip.conf | cut -d'=' -f2- | xargs || echo "") && \
+ uv pip install --system -r requirements.txt
+
+# Copy the project code
+COPY {{ project_path_from_build_root }}/project /app/{{ project_path_from_build_root }}/project
+
+WORKDIR /app/{{ project_path_from_build_root }}/project
+
+# Set environment variables
+ENV PYTHONPATH=/app
+
+# Run the agent using uvicorn
+CMD ["uvicorn", "acp:acp", "--host", "0.0.0.0", "--port", "8000"]
\ No newline at end of file
diff --git a/src/agentex/lib/cli/templates/sync/README.md.j2 b/src/agentex/lib/cli/templates/sync/README.md.j2
new file mode 100644
index 000000000..ae18308ed
--- /dev/null
+++ b/src/agentex/lib/cli/templates/sync/README.md.j2
@@ -0,0 +1,300 @@
+# {{ agent_name }} - AgentEx Sync ACP Template
+
+This is a starter template for building synchronous agents with the AgentEx framework. It provides a basic implementation of the Agent 2 Client Protocol (ACP) with immediate response capabilities to help you get started quickly.
+
+## What You'll Learn
+
+- **Tasks**: A task is a grouping mechanism for related messages. Think of it as a conversation thread or a session.
+- **Messages**: Messages are communication objects within a task. They can contain text, data, or instructions.
+- **Sync ACP**: Synchronous Agent 2 Client Protocol that requires immediate responses
+- **Message Handling**: How to process and respond to messages in real-time
+
+## Running the Agent
+
+1. Run the agent locally:
+```bash
+agentex agents run --manifest manifest.yaml
+```
+
+The agent will start on port 8000 and respond immediately to any messages it receives.
+
+## What's Inside
+
+This template:
+- Sets up a basic sync ACP server
+- Handles incoming messages with immediate responses
+- Provides a foundation for building real-time agents
+- Can include streaming support for long responses
+
+## Next Steps
+
+For more advanced agent development, check out the AgentEx tutorials:
+
+- **Tutorials 00-08**: Learn about building synchronous agents with ACP
+- **Tutorials 09-10**: Learn how to use Temporal to power asynchronous agents
+ - Tutorial 09: Basic Temporal workflow setup
+ - Tutorial 10: Advanced Temporal patterns and best practices
+
+These tutorials will help you understand:
+- How to handle long-running tasks
+- Implementing state machines
+- Managing complex workflows
+- Best practices for async agent development
+
+## The Manifest File
+
+The `manifest.yaml` file is your agent's configuration file. It defines:
+- How your agent should be built and packaged
+- What files are included in your agent's Docker image
+- Your agent's name and description
+- Local development settings (like the port your agent runs on)
+
+This file is essential for both local development and deployment of your agent.
+
+## Project Structure
+
+```
+{{ project_name }}/
+├── project/ # Your agent's code
+│ ├── __init__.py
+│ └── acp.py # ACP server and event handlers
+├── Dockerfile # Container definition
+├── manifest.yaml # Deployment config
+{% if use_uv %}
+└── pyproject.toml # Dependencies (uv)
+{% else %}
+└── requirements.txt # Dependencies (pip)
+{% endif %}
+```
+
+## Development
+
+### 1. Customize Message Handlers
+- Modify the handlers in `acp.py` to implement your agent's logic
+- Add your own tools and capabilities
+- Implement custom response generation
+
+### 2. Manage Dependencies
+
+{% if use_uv %}
+You chose **uv** for package management. Here's how to work with dependencies:
+
+```bash
+# Add new dependencies
+agentex uv add requests openai anthropic
+
+# Install/sync dependencies
+agentex uv sync
+
+# Run commands with uv
+uv run agentex agents run --manifest manifest.yaml
+```
+
+**Benefits of uv:**
+- Faster dependency resolution and installation
+- Better dependency isolation
+- Integrated CodeArtifact support
+- Modern Python packaging standards
+
+{% else %}
+You chose **pip** for package management. Here's how to work with dependencies:
+
+```bash
+# Edit requirements.txt manually to add dependencies
+echo "requests" >> requirements.txt
+echo "openai" >> requirements.txt
+
+# Install dependencies
+pip install -r requirements.txt
+```
+
+**Benefits of pip:**
+- Familiar workflow for most Python developers
+- Simple requirements.txt management
+- Wide compatibility
+{% endif %}
+
+### 3. Configure Credentials
+Options:
+1. Add any required credentials to your manifest.yaml via the `env` section
+2. Export them in your shell: `export OPENAI_API_KEY=...`
+3. For local development, create a `.env.local` file in the project directory
+
+## Local Development
+
+### 1. Start the Agentex Backend
+```bash
+# Navigate to the backend directory
+cd agentex
+
+# Start all services using Docker Compose
+make dev
+
+# Optional: In a separate terminal, use lazydocker for a better UI (everything should say "healthy")
+lzd
+```
+
+### 2. Run Your Agent
+```bash
+# From this directory
+export ENVIRONMENT=development && agentex agents run --manifest manifest.yaml
+```
+
+### 3. Interact with Your Agent
+
+**Option 1: Web UI (Recommended)**
+```bash
+# Start the local web interface
+cd agentex-web
+make dev
+
+# Then open http://localhost:3000 in your browser to chat with your agent
+```
+
+**Option 2: CLI (Deprecated)**
+```bash
+# Submit a task via CLI
+agentex tasks submit --agent {{ agent_name }} --task "Your task here"
+```
+
+## Development Tips
+
+### Environment Variables
+- Set environment variables in project/.env for any required credentials
+- Or configure them in the manifest.yaml under the `env` section
+- The `.env` file is automatically loaded in development mode
+
+### Local Testing
+- Use `export ENVIRONMENT=development` before running your agent
+- This enables local service discovery and debugging features
+- Your agent will automatically connect to locally running services
+
+### Sync ACP Considerations
+- Responses must be immediate (no long-running operations)
+- Use streaming for longer responses
+- Keep processing lightweight and fast
+- Consider caching for frequently accessed data
+
+### Debugging
+- Check agent logs in the terminal where you ran the agent
+- Use the web UI to inspect task history and responses
+- Monitor backend services with `lzd` (LazyDocker)
+- Test response times and optimize for speed
+
+### To build the agent Docker image locally (normally not necessary):
+
+1. First, set up CodeArtifact authentication:
+```bash
+../../setup-build-codeartifact.sh
+```
+
+2. Build the agent image:
+```bash
+agentex agents build --manifest manifest.yaml --secret 'id=codeartifact-pip-conf,src=.codeartifact-pip-conf'
+```
+{% if use_uv %}
+```bash
+# Build with uv
+agentex agents build --manifest manifest.yaml --push
+```
+{% else %}
+```bash
+# Build with pip
+agentex agents build --manifest manifest.yaml --push
+```
+{% endif %}
+
+
+## Advanced Features
+
+### Streaming Responses
+Handle long responses with streaming:
+
+```python
+# In project/acp.py
+@acp.on_message_send
+async def handle_message_send(params: SendMessageParams):
+ # For streaming responses
+ async def stream_response():
+ for chunk in generate_response_chunks():
+ yield TaskMessageUpdate(
+ content=chunk,
+ is_complete=False
+ )
+ yield TaskMessageUpdate(
+ content="",
+ is_complete=True
+ )
+
+ return stream_response()
+```
+
+### Custom Response Logic
+Add sophisticated response generation:
+
+```python
+# In project/acp.py
+@acp.on_message_send
+async def handle_message_send(params: SendMessageParams):
+ # Analyze input
+ user_message = params.content.content
+
+ # Generate response
+ response = await generate_intelligent_response(user_message)
+
+ return TextContent(
+ author=MessageAuthor.AGENT,
+ content=response
+ )
+```
+
+### Integration with External Services
+{% if use_uv %}
+```bash
+# Add service clients
+agentex uv add httpx requests-oauthlib
+
+# Add AI/ML libraries
+agentex uv add openai anthropic transformers
+
+# Add fast processing libraries
+agentex uv add numpy pandas
+```
+{% else %}
+```bash
+# Add to requirements.txt
+echo "httpx" >> requirements.txt
+echo "openai" >> requirements.txt
+echo "numpy" >> requirements.txt
+pip install -r requirements.txt
+```
+{% endif %}
+
+## Troubleshooting
+
+### Common Issues
+
+1. **Agent not appearing in web UI**
+ - Check if agent is running on port 8000
+ - Verify `ENVIRONMENT=development` is set
+ - Check agent logs for errors
+
+2. **Slow response times**
+ - Profile your message handling code
+ - Consider caching expensive operations
+ - Optimize database queries and API calls
+
+3. **Dependency issues**
+{% if use_uv %}
+ - Run `agentex uv sync` to ensure all dependencies are installed
+ - Check if CodeArtifact authentication is working
+{% else %}
+ - Run `pip install -r requirements.txt`
+ - Check if all dependencies are correctly listed in requirements.txt
+{% endif %}
+
+4. **Port conflicts**
+ - Check if another service is using port 8000
+ - Use `lsof -i :8000` to find conflicting processes
+
+Happy building with Sync ACP! 🚀⚡
\ No newline at end of file
diff --git a/src/agentex/lib/cli/templates/sync/deploy/example.yaml.j2 b/src/agentex/lib/cli/templates/sync/deploy/example.yaml.j2
new file mode 100644
index 000000000..d4874d5ba
--- /dev/null
+++ b/src/agentex/lib/cli/templates/sync/deploy/example.yaml.j2
@@ -0,0 +1,55 @@
+# Example Override Configuration
+# =============================
+# This file shows how to override deployment settings from the global defaults in manifest.yaml.
+# You can create multiple override files for different environments (e.g., staging.yaml, prod.yaml, dev.yaml)
+# Only specify values that differ from the global defaults in manifest.yaml
+
+# Override image tag
+# image:
+# tag: "v1.2.3"
+
+# Override replica count
+# replicaCount: 2
+
+# Environment-specific environment variables
+# env:
+# - name: LOG_LEVEL
+# value: "DEBUG"
+# - name: ENVIRONMENT
+# value: "staging"
+
+# Override resource requirements
+# resources:
+# requests:
+# cpu: "250m"
+# memory: "512Mi"
+# limits:
+# cpu: "500m"
+# memory: "1Gi"
+
+# Advanced: Additional helm chart value overrides
+# Use this for any helm chart values not covered by the simple options above
+# additional_overrides:
+# autoscaling:
+# enabled: true
+# minReplicas: 2
+# maxReplicas: 10
+# targetCPUUtilizationPercentage: 70
+#
+# service:
+# type: LoadBalancer
+#
+# ingress:
+# enabled: true
+# annotations:
+# kubernetes.io/ingress.class: "nginx"
+# hosts:
+# - host: {{ agent_name }}.example.com
+# paths:
+# - path: /
+# pathType: Prefix
+
+# To use this configuration:
+# 1. Copy this file to a new file (e.g., staging.yaml, prod.yaml)
+# 2. Uncomment and modify the values you want to override
+# 3. Deploy with: agentex agents deploy --cluster your-cluster --namespace your-namespace --override-file staging.yaml
\ No newline at end of file
diff --git a/src/agentex/lib/cli/templates/sync/manifest.yaml.j2 b/src/agentex/lib/cli/templates/sync/manifest.yaml.j2
new file mode 100644
index 000000000..213849abe
--- /dev/null
+++ b/src/agentex/lib/cli/templates/sync/manifest.yaml.j2
@@ -0,0 +1,116 @@
+# Agent Manifest Configuration
+# ---------------------------
+# This file defines how your agent should be built and deployed.
+
+# Build Configuration
+# ------------------
+# The build config defines what gets packaged into your agent's Docker image.
+# This same configuration is used whether building locally or remotely.
+#
+# When building:
+# 1. All files from include_paths are collected into a build context
+# 2. The context is filtered by dockerignore rules
+# 3. The Dockerfile uses this context to build your agent's image
+# 4. The image is pushed to a registry and used to run your agent
+build:
+ context:
+ # Root directory for the build context
+ root: ../ # Keep this as the default root
+
+ # Paths to include in the Docker build context
+ # Must include:
+ # - Your agent's directory (your custom agent code)
+ # These paths are collected and sent to the Docker daemon for building
+ include_paths:
+ - {{ project_path_from_build_root }}
+
+ # Path to your agent's Dockerfile
+ # This defines how your agent's image is built from the context
+ # Relative to the root directory
+ dockerfile: {{ project_path_from_build_root }}/Dockerfile
+
+ # Path to your agent's .dockerignore
+ # Filters unnecessary files from the build context
+ # Helps keep build context small and builds fast
+ dockerignore: {{ project_path_from_build_root }}/.dockerignore
+
+
+# Local Development Configuration
+# -----------------------------
+# Only used when running the agent locally
+local_development:
+ agent:
+ port: 8000 # Port where your local ACP server is running
+ host_address: host.docker.internal # Host address for Docker networking (host.docker.internal for Docker, localhost for direct)
+
+ # File paths for local development (relative to this manifest.yaml)
+ paths:
+ # Path to ACP server file
+ # Examples:
+ # project/acp.py (standard)
+ # src/server.py (custom structure)
+ # ../shared/acp.py (shared across projects)
+ # /absolute/path/acp.py (absolute path)
+ acp: project/acp.py
+
+
+# Agent Configuration
+# -----------------
+agent:
+ acp_type: sync
+ # Unique name for your agent
+ # Used for task routing and monitoring
+ name: {{ agent_name }}
+
+ # Description of what your agent does
+ # Helps with documentation and discovery
+ description: {{ description }}
+
+ # Temporal workflow configuration
+ # Set enabled: true to use Temporal workflows for long-running tasks
+ temporal:
+ enabled: false
+
+ # Optional: Credentials mapping
+ # Maps Kubernetes secrets to environment variables
+ # Common credentials include:
+ # credentials:
+ # - env_var_name: OPENAI_API_KEY
+ # secret_name: openai-api-key
+ # secret_key: api-key
+
+ # Optional: Set Environment variables for running your agent locally as well
+ # as for deployment later on
+ # env:
+ # OPENAI_API_KEY: ""
+ # OPENAI_BASE_URL: ""
+ # OPENAI_ORG_ID: ""
+
+
+# Deployment Configuration
+# -----------------------
+# Configuration for deploying your agent to Kubernetes clusters
+deployment:
+ # Container image configuration
+ image:
+ repository: "" # Update with your container registry
+ tag: "latest" # Default tag, should be versioned in production
+
+ # Global deployment settings that apply to all clusters
+ # These can be overridden in cluster-specific files (deploy/*.yaml)
+ global:
+ agent:
+ name: "{{ agent_name }}"
+ description: "{{ description }}"
+
+ # Default replica count
+ replicaCount: 1
+
+ # Default resource requirements
+ resources:
+ requests:
+ cpu: "500m"
+ memory: "1Gi"
+ limits:
+ cpu: "1000m"
+ memory: "2Gi"
\ No newline at end of file
diff --git a/src/agentex/lib/cli/templates/sync/project/acp.py.j2 b/src/agentex/lib/cli/templates/sync/project/acp.py.j2
new file mode 100644
index 000000000..d990f5c5d
--- /dev/null
+++ b/src/agentex/lib/cli/templates/sync/project/acp.py.j2
@@ -0,0 +1,27 @@
+from typing import AsyncGenerator, Union
+from agentex.lib.sdk.fastacp.fastacp import FastACP
+from agentex.lib.types.acp import SendMessageParams
+
+from agentex.lib.types.task_message_updates import TaskMessageUpdate
+from agentex.types.task_messages import TaskMessageContent, TextContent
+from agentex.lib.utils.logging import make_logger
+
+logger = make_logger(__name__)
+
+
+# Create an ACP server
+acp = FastACP.create(
+ acp_type="sync",
+)
+
+
+@acp.on_message_send
+async def handle_message_send(
+ params: SendMessageParams
+) -> Union[TaskMessageContent, AsyncGenerator[TaskMessageUpdate, None]]:
+ """Default message handler with streaming support"""
+ return TextContent(
+ author="agent",
+ content=f"Hello! I've received your message. Here's a generic response, but in future tutorials we'll see how you can get me to intelligently respond to your message. This is what I heard you say: {params.content.content}",
+ )
+
diff --git a/src/agentex/lib/cli/templates/sync/pyproject.toml.j2 b/src/agentex/lib/cli/templates/sync/pyproject.toml.j2
new file mode 100644
index 000000000..3230dbecf
--- /dev/null
+++ b/src/agentex/lib/cli/templates/sync/pyproject.toml.j2
@@ -0,0 +1,41 @@
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[project]
+name = "{{ project_name }}"
+version = "0.1.0"
+description = "{{ description }}"
+readme = "README.md"
+requires-python = ">=3.12"
+dependencies = [
+ "agentex-sdk",
+ "scale-gp",
+]
+
+[project.optional-dependencies]
+dev = [
+ "pytest",
+ "black",
+ "isort",
+ "flake8",
+]
+
+[dependency-groups]
+editable-apy = [
+ "agentex-sdk @ file://{{ agentex_py_path }}",
+]
+
+[tool.hatch.build.targets.wheel]
+packages = ["project"]
+
+[tool.black]
+line-length = 88
+target-version = ['py312']
+
+[tool.isort]
+profile = "black"
+line_length = 88
+
+[[tool.uv.index]]
+url = "https://scale-307185671274.d.codeartifact.us-west-2.amazonaws.com/pypi/scale-pypi/simple/"
\ No newline at end of file
diff --git a/src/agentex/lib/cli/templates/sync/requirements.txt.j2 b/src/agentex/lib/cli/templates/sync/requirements.txt.j2
new file mode 100644
index 000000000..0b8ae19b3
--- /dev/null
+++ b/src/agentex/lib/cli/templates/sync/requirements.txt.j2
@@ -0,0 +1,5 @@
+# AgentEx Python SDK (installed from the configured package index)
+agentex-sdk
+
+# Scale GenAI Platform Python SDK
+scale-gp
diff --git a/src/agentex/lib/cli/templates/temporal/.dockerignore.j2 b/src/agentex/lib/cli/templates/temporal/.dockerignore.j2
new file mode 100644
index 000000000..4521d8465
--- /dev/null
+++ b/src/agentex/lib/cli/templates/temporal/.dockerignore.j2
@@ -0,0 +1,46 @@
+# Python
+__pycache__/
+*.py[cod]
+*$py.class
+*.so
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# Environments
+.env**
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# IDE
+.idea/
+.vscode/
+*.swp
+*.swo
+
+# Git
+.git
+.gitignore
+
+# Misc
+.DS_Store
+
+# CodeArtifact configuration
+.codeartifact-pip-conf
\ No newline at end of file
diff --git a/src/agentex/lib/cli/templates/temporal/Dockerfile-uv.j2 b/src/agentex/lib/cli/templates/temporal/Dockerfile-uv.j2
new file mode 100644
index 000000000..1980a0fae
--- /dev/null
+++ b/src/agentex/lib/cli/templates/temporal/Dockerfile-uv.j2
@@ -0,0 +1,51 @@
+# syntax=docker/dockerfile:1.3
+FROM python:3.12-slim
+COPY --from=ghcr.io/astral-sh/uv:0.6.4 /uv /uvx /bin/
+
+# Install system dependencies
+RUN apt-get update && apt-get install -y \
+ htop \
+ vim \
+ curl \
+ tar \
+ python3-dev \
+ postgresql-client \
+ build-essential \
+ libpq-dev \
+ gcc \
+ cmake \
+ netcat-openbsd \
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
+
+# Install tctl (Temporal CLI)
+RUN curl -L https://github.com/temporalio/tctl/releases/download/v1.18.1/tctl_1.18.1_linux_arm64.tar.gz -o /tmp/tctl.tar.gz && \
+ tar -xzf /tmp/tctl.tar.gz -C /usr/local/bin && \
+ chmod +x /usr/local/bin/tctl && \
+ rm /tmp/tctl.tar.gz
+
+RUN uv pip install --system --upgrade pip setuptools wheel
+
+ENV UV_HTTP_TIMEOUT=1000
+
+# Copy just the pyproject.toml file to optimize caching
+COPY {{ project_path_from_build_root }}/pyproject.toml /app/{{ project_path_from_build_root }}/pyproject.toml
+
+WORKDIR /app/{{ project_path_from_build_root }}
+
+# Install the required Python packages using uv
+RUN --mount=type=secret,id=codeartifact-pip-conf,target=/etc/pip.conf \
+ export UV_INDEX_URL=$(grep -E "^index-url" /etc/pip.conf | cut -d'=' -f2- | xargs) && \
+ export UV_EXTRA_INDEX_URL=$(grep -E "^extra-index-url" /etc/pip.conf | cut -d'=' -f2- | xargs || echo "") && \
+ uv pip install --system .
+
+# Copy the project code
+COPY {{ project_path_from_build_root }}/project /app/{{ project_path_from_build_root }}/project
+
+WORKDIR /app/{{ project_path_from_build_root }}/project
+
+# Run the ACP server using uvicorn
+CMD ["uvicorn", "acp:acp", "--host", "0.0.0.0", "--port", "8000"]
+
+# When we deploy the worker, we will replace the CMD with the following
+# CMD ["python", "-m", "run_worker"]
\ No newline at end of file
diff --git a/src/agentex/lib/cli/templates/temporal/Dockerfile.j2 b/src/agentex/lib/cli/templates/temporal/Dockerfile.j2
new file mode 100644
index 000000000..63bcfeef3
--- /dev/null
+++ b/src/agentex/lib/cli/templates/temporal/Dockerfile.j2
@@ -0,0 +1,51 @@
+# syntax=docker/dockerfile:1.3
+FROM python:3.12-slim
+COPY --from=ghcr.io/astral-sh/uv:0.6.4 /uv /uvx /bin/
+
+# Install system dependencies
+RUN apt-get update && apt-get install -y \
+ htop \
+ vim \
+ curl \
+ tar \
+ python3-dev \
+ postgresql-client \
+ build-essential \
+ libpq-dev \
+ gcc \
+ cmake \
+ netcat-openbsd \
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
+
+# Install tctl (Temporal CLI)
+RUN curl -L https://github.com/temporalio/tctl/releases/download/v1.18.1/tctl_1.18.1_linux_arm64.tar.gz -o /tmp/tctl.tar.gz && \
+ tar -xzf /tmp/tctl.tar.gz -C /usr/local/bin && \
+ chmod +x /usr/local/bin/tctl && \
+ rm /tmp/tctl.tar.gz
+
+RUN uv pip install --system --upgrade pip setuptools wheel
+
+ENV UV_HTTP_TIMEOUT=1000
+
+# Copy just the requirements file to optimize caching
+COPY {{ project_path_from_build_root }}/requirements.txt /app/{{ project_path_from_build_root }}/requirements.txt
+
+WORKDIR /app/{{ project_path_from_build_root }}
+
+# Install the required Python packages
+RUN --mount=type=secret,id=codeartifact-pip-conf,target=/etc/pip.conf \
+ export UV_INDEX_URL=$(grep -E "^index-url" /etc/pip.conf | cut -d'=' -f2- | xargs) && \
+ export UV_EXTRA_INDEX_URL=$(grep -E "^extra-index-url" /etc/pip.conf | cut -d'=' -f2- | xargs || echo "") && \
+ uv pip install --system -r requirements.txt
+
+# Copy the project code
+COPY {{ project_path_from_build_root }}/project /app/{{ project_path_from_build_root }}/project
+
+WORKDIR /app/{{ project_path_from_build_root }}/project
+
+# Run the ACP server using uvicorn
+CMD ["uvicorn", "acp:acp", "--host", "0.0.0.0", "--port", "8000"]
+
+# When we deploy the worker, we will replace the CMD with the following
+# CMD ["python", "-m", "run_worker"]
\ No newline at end of file
diff --git a/src/agentex/lib/cli/templates/temporal/README.md.j2 b/src/agentex/lib/cli/templates/temporal/README.md.j2
new file mode 100644
index 000000000..536658845
--- /dev/null
+++ b/src/agentex/lib/cli/templates/temporal/README.md.j2
@@ -0,0 +1,323 @@
+# {{ agent_name }} - AgentEx Temporal Agent Template
+
+This is a starter template for building asynchronous agents with the AgentEx framework and Temporal. It provides a basic implementation of the Agent 2 Client Protocol (ACP) with Temporal workflow support to help you get started quickly.
+
+## What You'll Learn
+
+- **Tasks**: A task is a grouping mechanism for related messages. Think of it as a conversation thread or a session.
+- **Messages**: Messages are communication objects within a task. They can contain text, data, or instructions.
+- **ACP Events**: The agent responds to four main events:
+ - `task_received`: When a new task is created
+ - `task_message_received`: When a message is sent within a task
+ - `task_approved`: When a task is approved
+ - `task_canceled`: When a task is canceled
+- **Temporal Workflows**: Long-running processes that can handle complex state management and async operations
+
+## Running the Agent
+
+1. Run the agent locally:
+```bash
+agentex agents run --manifest manifest.yaml
+```
+
+The agent will start on port 8000 and print messages whenever it receives any of the ACP events.
+
+## What's Inside
+
+This template:
+- Sets up a basic ACP server with Temporal integration
+- Handles each of the required ACP events
+- Provides a foundation for building complex async agents
+- Includes Temporal workflow and activity definitions
+
+## Next Steps
+
+For more advanced agent development, check out the AgentEx tutorials:
+
+- **Tutorials 00-08**: Learn about building synchronous agents with ACP
+- **Tutorials 09-10**: Learn how to use Temporal to power asynchronous agents
+ - Tutorial 09: Basic Temporal workflow setup
+ - Tutorial 10: Advanced Temporal patterns and best practices
+
+These tutorials will help you understand:
+- How to handle long-running tasks
+- Implementing state machines
+- Managing complex workflows
+- Best practices for async agent development
+
+## The Manifest File
+
+The `manifest.yaml` file is your agent's configuration file. It defines:
+- How your agent should be built and packaged
+- What files are included in your agent's Docker image
+- Your agent's name and description
+- Local development settings (like the port your agent runs on)
+- Temporal worker configuration
+
+This file is essential for both local development and deployment of your agent.
+
+## Project Structure
+
+```
+{{ project_name }}/
+├── project/ # Your agent's code
+│ ├── __init__.py
+│ ├── acp.py # ACP server and event handlers
+│ ├── workflow.py # Temporal workflow definitions
+│ ├── activities.py # Temporal activity definitions
+│ └── run_worker.py # Temporal worker setup
+├── Dockerfile # Container definition
+├── manifest.yaml # Deployment config
+{% if use_uv %}
+└── pyproject.toml # Dependencies (uv)
+{% else %}
+└── requirements.txt # Dependencies (pip)
+{% endif %}
+```
+
+## Development
+
+### 1. Customize Event Handlers
+- Modify the handlers in `acp.py` to implement your agent's logic
+- Add your own tools and capabilities
+- Implement custom state management
+
+### 2. Develop Temporal Workflows
+- Edit `workflow.py` to define your agent's async workflow logic
+- Modify `activities.py` to add custom activities
+- Use `run_worker.py` to configure the Temporal worker
+
+### 3. Manage Dependencies
+
+{% if use_uv %}
+You chose **uv** for package management. Here's how to work with dependencies:
+
+```bash
+# Add new dependencies
+agentex uv add requests openai anthropic
+
+# Add Temporal-specific dependencies (already included)
+agentex uv add temporalio
+
+# Install/sync dependencies
+agentex uv sync
+
+# Run commands with uv
+uv run agentex agents run --manifest manifest.yaml
+```
+
+**Benefits of uv:**
+- Faster dependency resolution and installation
+- Better dependency isolation
+- Integrated CodeArtifact support
+- Modern Python packaging standards
+
+{% else %}
+You chose **pip** for package management. Here's how to work with dependencies:
+
+```bash
+# (Recommended) Create a dedicated conda env or virtualenv for your agent first.
+# Optionally add agentex-sdk editable installation
+
+# Edit requirements.txt manually to add dependencies
+echo "requests" >> requirements.txt
+echo "openai" >> requirements.txt
+
+# Temporal dependencies are already included
+# temporalio is already in requirements.txt
+
+# Install dependencies
+pip install -r requirements.txt
+```
+
+**Benefits of pip:**
+- Familiar workflow for most Python developers
+- Simple requirements.txt management
+- Wide compatibility
+{% endif %}
+
+### 4. Configure Credentials
+- Add any required credentials to your manifest.yaml
+- For local development, create a `.env` file in the project directory
+- Use `load_dotenv()` only in development mode:
+
+```python
+import os
+from dotenv import load_dotenv
+
+if os.environ.get("ENVIRONMENT") == "development":
+ load_dotenv()
+```
+
+## Local Development
+
+### 1. Start the Agentex Backend
+```bash
+# Navigate to the backend directory
+cd agentex
+
+# Start all services using Docker Compose
+make dev
+
+# Optional: In a separate terminal, use lazydocker for a better UI (everything should say "healthy")
+lzd
+```
+
+### 2. Setup Your Agent's requirements/pyproject.toml
+```bash
+agentex uv sync [--group editable-apy]
+source .venv/bin/activate
+
+# OR
+conda create -n {{ project_name }} python=3.12
+conda activate {{ project_name }}
+pip install -r requirements.txt
+```
+### 3. Run Your Agent
+```bash
+# From this directory
+export ENVIRONMENT=development && [uv run] agentex agents run --manifest manifest.yaml
+```
+### 4. Interact with Your Agent
+
+Option 0: CLI (deprecated - to be replaced once a new CLI is implemented - please use the web UI for now!)
+```bash
+# Submit a task via CLI
+agentex tasks submit --agent {{ agent_name }} --task "Your task here"
+```
+
+Option 1: Web UI
+```bash
+# Start the local web interface
+cd agentex-web
+make dev
+
+# Then open http://localhost:3000 in your browser to chat with your agent
+```
+
+## Development Tips
+
+### Environment Variables
+- Set environment variables in project/.env for any required credentials
+- Or configure them in the manifest.yaml under the `env` section
+- The `.env` file is automatically loaded in development mode
+
+### Local Testing
+- Use `export ENVIRONMENT=development` before running your agent
+- This enables local service discovery and debugging features
+- Your agent will automatically connect to locally running services
+
+### Temporal-Specific Tips
+- Monitor workflows in the Temporal Web UI at http://localhost:8080
+- Use the Temporal CLI for advanced workflow management
+- Check workflow logs for debugging async operations
+
+### Debugging
+- Check agent logs in the terminal where you ran the agent
+- Use the web UI to inspect task history and responses
+- Monitor backend services with `lzd` (LazyDocker)
+- Use Temporal Web UI for workflow debugging
+
+### To build the agent Docker image locally (normally not necessary):
+
+1. First, set up CodeArtifact authentication:
+```bash
+../../setup-build-codeartifact.sh
+```
+
+2. Build the agent image:
+```bash
+agentex agents build --manifest manifest.yaml --secret 'id=codeartifact-pip-conf,src=.codeartifact-pip-conf'
+```
+
+## Advanced Features
+
+### Temporal Workflows
+Extend your agent with sophisticated async workflows:
+
+```python
+# In project/workflow.py
+@workflow.defn
+class MyWorkflow(BaseWorkflow):
+ async def complex_operation(self):
+ # Multi-step async operations
+ # Error handling and retries
+ # State management
+ pass
+```
+
+### Custom Activities
+Add custom activities for external operations:
+
+```python
+# In project/activities.py
+@activity.defn
+async def call_external_api(data):
+ # HTTP requests, database operations, etc.
+ pass
+```
+
+### Integration with External Services
+{% if use_uv %}
+```bash
+# Add service clients
+agentex uv add httpx requests-oauthlib
+
+# Add AI/ML libraries
+agentex uv add openai anthropic transformers
+
+# Add database clients
+agentex uv add asyncpg redis
+```
+{% else %}
+```bash
+# Add to requirements.txt
+echo "httpx" >> requirements.txt
+echo "openai" >> requirements.txt
+echo "asyncpg" >> requirements.txt
+pip install -r requirements.txt
+```
+{% endif %}
+
+## Troubleshooting
+
+### Common Issues
+
+1. **Agent not appearing in web UI**
+ - Check if agent is running on port 8000
+ - Verify `ENVIRONMENT=development` is set
+ - Check agent logs for errors
+
+2. **Temporal workflow issues**
+ - Check Temporal Web UI at http://localhost:8080
+ - Verify Temporal server is running in backend services
+ - Check workflow logs for specific errors
+
+3. **Dependency issues**
+{% if use_uv %}
+ - Run `agentex uv sync` to ensure all dependencies are installed
+ - Check if CodeArtifact authentication is working
+ - Verify temporalio is properly installed
+{% else %}
+ - Run `pip install -r requirements.txt`
+ - Check if all dependencies are correctly listed in requirements.txt
+ - Verify temporalio is installed correctly
+{% endif %}
+
+4. **Port conflicts**
+ - Check if another service is using port 8000
+ - Use `lsof -i :8000` to find conflicting processes
+
+### Temporal-Specific Troubleshooting
+
+1. **Workflow not starting**
+ - Check if Temporal server is running (`docker ps`)
+ - Verify task queue configuration in `run_worker.py`
+ - Check workflow registration in the worker
+
+2. **Activity failures**
+ - Check activity logs in the console
+ - Verify activity registration
+ - Check for timeout issues
+
+Happy building with Temporal! 🚀⚡
\ No newline at end of file
diff --git a/src/agentex/lib/cli/templates/temporal/deploy/example.yaml.j2 b/src/agentex/lib/cli/templates/temporal/deploy/example.yaml.j2
new file mode 100644
index 000000000..d4874d5ba
--- /dev/null
+++ b/src/agentex/lib/cli/templates/temporal/deploy/example.yaml.j2
@@ -0,0 +1,55 @@
+# Example Override Configuration
+# =============================
+# This file shows how to override deployment settings from the global defaults in manifest.yaml.
+# You can create multiple override files for different environments (e.g., staging.yaml, prod.yaml, dev.yaml)
+# Only specify values that differ from the global defaults in manifest.yaml
+
+# Override image tag
+# image:
+# tag: "v1.2.3"
+
+# Override replica count
+# replicaCount: 2
+
+# Environment-specific environment variables
+# env:
+# - name: LOG_LEVEL
+# value: "DEBUG"
+# - name: ENVIRONMENT
+# value: "staging"
+
+# Override resource requirements
+# resources:
+# requests:
+# cpu: "250m"
+# memory: "512Mi"
+# limits:
+# cpu: "500m"
+# memory: "1Gi"
+
+# Advanced: Additional helm chart value overrides
+# Use this for any helm chart values not covered by the simple options above
+# additional_overrides:
+# autoscaling:
+# enabled: true
+# minReplicas: 2
+# maxReplicas: 10
+# targetCPUUtilizationPercentage: 70
+#
+# service:
+# type: LoadBalancer
+#
+# ingress:
+# enabled: true
+# annotations:
+# kubernetes.io/ingress.class: "nginx"
+# hosts:
+# - host: {{ agent_name }}.example.com
+# paths:
+# - path: /
+# pathType: Prefix
+
+# To use this configuration:
+# 1. Copy this file to a new file (e.g., staging.yaml, prod.yaml)
+# 2. Uncomment and modify the values you want to override
+# 3. Deploy with: agentex agents deploy --cluster your-cluster --namespace your-namespace --override-file staging.yaml
\ No newline at end of file
diff --git a/src/agentex/lib/cli/templates/temporal/manifest.yaml.j2 b/src/agentex/lib/cli/templates/temporal/manifest.yaml.j2
new file mode 100644
index 000000000..20f763692
--- /dev/null
+++ b/src/agentex/lib/cli/templates/temporal/manifest.yaml.j2
@@ -0,0 +1,137 @@
+# Agent Manifest Configuration
+# ---------------------------
+# This file defines how your agent should be built and deployed.
+
+# Build Configuration
+# ------------------
+# The build config defines what gets packaged into your agent's Docker image.
+# This same configuration is used whether building locally or remotely.
+#
+# When building:
+# 1. All files from include_paths are collected into a build context
+# 2. The context is filtered by dockerignore rules
+# 3. The Dockerfile uses this context to build your agent's image
+# 4. The image is pushed to a registry and used to run your agent
+build:
+ context:
+ # Root directory for the build context
+ root: ../ # Keep this as the default root
+
+ # Paths to include in the Docker build context
+ # Must include:
+ # - Your agent's directory (your custom agent code)
+ # These paths are collected and sent to the Docker daemon for building
+ include_paths:
+ - {{ project_path_from_build_root }}
+
+ # Path to your agent's Dockerfile
+ # This defines how your agent's image is built from the context
+ # Relative to the root directory
+ dockerfile: {{ project_path_from_build_root }}/Dockerfile
+
+ # Path to your agent's .dockerignore
+ # Filters unnecessary files from the build context
+ # Helps keep build context small and builds fast
+ dockerignore: {{ project_path_from_build_root }}/.dockerignore
+
+
+# Local Development Configuration
+# -----------------------------
+# Only used when running the agent locally
+local_development:
+ agent:
+ port: 8000 # Port where your local ACP server is running
+ host_address: host.docker.internal # Host address for Docker networking (host.docker.internal for Docker, localhost for direct)
+
+ # File paths for local development (relative to this manifest.yaml)
+ paths:
+ # Path to ACP server file
+ # Examples:
+ # project/acp.py (standard)
+ # src/server.py (custom structure)
+ # ../shared/acp.py (shared across projects)
+ # /absolute/path/acp.py (absolute path)
+ acp: project/acp.py
+
+ # Path to temporal worker file
+ # Examples:
+ # project/run_worker.py (standard)
+ # workers/temporal.py (custom structure)
+ # ../shared/worker.py (shared across projects)
+ worker: project/run_worker.py
+
+
+# Agent Configuration
+# -----------------
+agent:
+ # Type of agent - either sync or agentic
+ acp_type: agentic
+
+ # Unique name for your agent
+ # Used for task routing and monitoring
+ name: {{ agent_name }}
+
+ # Description of what your agent does
+ # Helps with documentation and discovery
+ description: {{ description }}
+
+ # Temporal workflow configuration
+ # This enables your agent to run as a Temporal workflow for long-running tasks
+ temporal:
+ enabled: true
+ workflows:
+ # Name of the workflow class
+ # Must match the @workflow.defn name in your workflow.py
+ - name: {{ workflow_name }}
+
+ # Queue name for task distribution
+ # Used by Temporal to route tasks to your agent
+ # Convention: <agent_name>_task_queue
+ queue_name: {{ queue_name }}
+
+ # Optional: Credentials mapping
+ # Maps Kubernetes secrets to environment variables
+ # Common credentials include:
+ # credentials:
+ # - env_var_name: OPENAI_API_KEY
+ # secret_name: openai-api-key
+ # secret_key: api-key
+
+ # Optional: Set Environment variables for running your agent locally as well
+ # as for deployment later on
+ # env:
+ # OPENAI_API_KEY: ""
+ # OPENAI_BASE_URL: ""
+ # OPENAI_ORG_ID: ""
+
+
+# Deployment Configuration
+# -----------------------
+# Configuration for deploying your agent to Kubernetes clusters
+deployment:
+ # Container image configuration
+ image:
+ repository: "" # Update with your container registry
+ tag: "latest" # Default tag, should be versioned in production
+
+ imagePullSecrets:
+ - name: my-registry-secret # Update with your image pull secret name
+
+ # Global deployment settings that apply to all clusters
+ # These can be overridden using --override-file with custom configuration files
+ global:
+ agent:
+ name: "{{ agent_name }}"
+ description: "{{ description }}"
+
+ # Default replica count
+ replicaCount: 1
+
+ # Default resource requirements
+ resources:
+ requests:
+ cpu: "500m"
+ memory: "1Gi"
+ limits:
+ cpu: "1000m"
+ memory: "2Gi"
\ No newline at end of file
diff --git a/src/agentex/lib/cli/templates/temporal/project/acp.py.j2 b/src/agentex/lib/cli/templates/temporal/project/acp.py.j2
new file mode 100644
index 000000000..6e4cb2f52
--- /dev/null
+++ b/src/agentex/lib/cli/templates/temporal/project/acp.py.j2
@@ -0,0 +1,30 @@
+import os
+from agentex.lib.sdk.fastacp.fastacp import FastACP
+from agentex.lib.types.fastacp import TemporalACPConfig
+
+
+# Create the ACP server
+acp = FastACP.create(
+ acp_type="agentic",
+ config=TemporalACPConfig(
+ # When deployed to the cluster, the Temporal address will automatically be set to the cluster address
+ # For local development, we set the address manually to talk to the local Temporal service set up via docker compose
+ type="temporal",
+ temporal_address=os.getenv("TEMPORAL_ADDRESS", "localhost:7233")
+ )
+)
+
+
+# Notice that we don't need to register any handlers when we use type="temporal"
+# If you look at the code in agentex.sdk.fastacp.impl.temporal_acp
+# You can see that these handlers are automatically registered when the ACP is created
+
+# @acp.on_task_create
+# This will be handled by the method in your workflow that is decorated with @workflow.run
+
+# @acp.on_task_event_send
+# This will be handled by the method in your workflow that is decorated with @workflow.signal(name=SignalName.RECEIVE_MESSAGE)
+
+# @acp.on_task_cancel
+# This does not need to be handled by your workflow.
+# It is automatically handled by the temporal client which cancels the workflow directly
\ No newline at end of file
diff --git a/src/agentex/lib/cli/templates/temporal/project/run_worker.py.j2 b/src/agentex/lib/cli/templates/temporal/project/run_worker.py.j2
new file mode 100644
index 000000000..6f1fa8e7e
--- /dev/null
+++ b/src/agentex/lib/cli/templates/temporal/project/run_worker.py.j2
@@ -0,0 +1,33 @@
+import asyncio
+import os
+
+from agentex.lib.core.temporal.activities import get_all_activities
+from agentex.lib.core.temporal.workers.worker import AgentexWorker
+from agentex.lib.utils.logging import make_logger
+from agentex.lib.environment_variables import EnvironmentVariables
+
+from workflow import {{ workflow_class }}
+
+
+environment_variables = EnvironmentVariables.refresh()
+
+logger = make_logger(__name__)
+
+
+async def main():
+ task_queue_name = environment_variables.WORKFLOW_TASK_QUEUE
+ if task_queue_name is None:
+ raise ValueError("WORKFLOW_TASK_QUEUE is not set")
+
+ # Create a worker with automatic tracing
+ worker = AgentexWorker(
+ task_queue=task_queue_name,
+ )
+
+ await worker.run(
+ activities=get_all_activities(),
+ workflow={{ workflow_class }},
+ )
+
+if __name__ == "__main__":
+ asyncio.run(main())
\ No newline at end of file
diff --git a/src/agentex/lib/cli/templates/temporal/project/workflow.py.j2 b/src/agentex/lib/cli/templates/temporal/project/workflow.py.j2
new file mode 100644
index 000000000..2a0aa9c87
--- /dev/null
+++ b/src/agentex/lib/cli/templates/temporal/project/workflow.py.j2
@@ -0,0 +1,66 @@
+import json
+
+from temporalio import workflow
+
+from agentex.lib import adk
+from agentex.lib.types.acp import CreateTaskParams, SendEventParams
+from agentex.lib.core.temporal.workflows.workflow import BaseWorkflow
+from agentex.lib.core.temporal.types.workflow import SignalName
+from agentex.lib.utils.logging import make_logger
+from agentex.types.task_message import TextContent
+from agentex.lib.environment_variables import EnvironmentVariables
+
+environment_variables = EnvironmentVariables.refresh()
+
+if environment_variables.WORKFLOW_NAME is None:
+ raise ValueError("Environment variable WORKFLOW_NAME is not set")
+
+if environment_variables.AGENT_NAME is None:
+ raise ValueError("Environment variable AGENT_NAME is not set")
+
+logger = make_logger(__name__)
+
+@workflow.defn(name=environment_variables.WORKFLOW_NAME)
+class {{ workflow_class }}(BaseWorkflow):
+ """
+ Minimal async workflow template for AgentEx Temporal agents.
+ """
+ def __init__(self):
+ super().__init__(display_name=environment_variables.AGENT_NAME)
+ self._complete_task = False
+
+ @workflow.signal(name=SignalName.RECEIVE_EVENT)
+ async def on_task_event_send(self, params: SendEventParams) -> None:
+ logger.info(f"Received task message instruction: {params}")
+
+ # 2. Echo back the client's message to show it in the UI. This is not done by default so the agent developer has full control over what is shown to the user.
+ await adk.messages.create(task_id=params.task.id, content=params.event.content)
+
+ # 3. Send a simple response message.
+ # In future tutorials, this is where we'll add more sophisticated response logic.
+ await adk.messages.create(
+ task_id=params.task.id,
+ content=TextContent(
+ author="agent",
+ content=f"Hello! I've received your message. I can't respond right now, but in future tutorials we'll see how you can get me to intelligently respond to your message.",
+ ),
+ )
+
+ @workflow.run
+ async def on_task_create(self, params: CreateTaskParams) -> str:
+ logger.info(f"Received task create params: {params}")
+
+ # 1. Acknowledge that the task has been created.
+ await adk.messages.create(
+ task_id=params.task.id,
+ content=TextContent(
+ author="agent",
+ content=f"Hello! I've received your task. Normally you can do some state initialization here, or just pass and do nothing until you get your first event. For now I'm just acknowledging that I've received a task with the following params:\n\n{json.dumps(params.params, indent=2)}.\n\nYou should only see this message once, when the task is created. All subsequent events will be handled by the `on_task_event_send` handler.",
+ ),
+ )
+
+ await workflow.wait_condition(
+ lambda: self._complete_task,
+ timeout=None, # Set a timeout if you want to prevent the task from running indefinitely. Generally this is not needed. Temporal can run hundreds of millions of workflows in parallel and more. Only do this if you have a specific reason to do so.
+ )
+ return "Task completed"
diff --git a/src/agentex/lib/cli/templates/temporal/pyproject.toml.j2 b/src/agentex/lib/cli/templates/temporal/pyproject.toml.j2
new file mode 100644
index 000000000..5a4c873e8
--- /dev/null
+++ b/src/agentex/lib/cli/templates/temporal/pyproject.toml.j2
@@ -0,0 +1,42 @@
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[project]
+name = "{{ project_name }}"
+version = "0.1.0"
+description = "{{ description }}"
+readme = "README.md"
+requires-python = ">=3.12"
+dependencies = [
+ "agentex-sdk",
+ "scale-gp",
+ "temporalio",
+]
+
+[project.optional-dependencies]
+dev = [
+ "pytest",
+ "black",
+ "isort",
+ "flake8",
+]
+
+[dependency-groups]
+editable-apy = [
+ "agentex-sdk @ file://{{ agentex_py_path }}",
+]
+
+[tool.hatch.build.targets.wheel]
+packages = ["project"]
+
+[tool.black]
+line-length = 88
+target-version = ['py312']
+
+[tool.isort]
+profile = "black"
+line_length = 88
+
+[[tool.uv.index]]
+url = "https://scale-307185671274.d.codeartifact.us-west-2.amazonaws.com/pypi/scale-pypi/simple/"
\ No newline at end of file
diff --git a/src/agentex/lib/cli/templates/temporal/requirements.txt.j2 b/src/agentex/lib/cli/templates/temporal/requirements.txt.j2
new file mode 100644
index 000000000..0b8ae19b3
--- /dev/null
+++ b/src/agentex/lib/cli/templates/temporal/requirements.txt.j2
@@ -0,0 +1,5 @@
+# Install agentex-sdk from local path
+agentex-sdk
+
+# Scale GenAI Platform Python SDK
+scale-gp
diff --git a/src/agentex/lib/cli/utils/cli_utils.py b/src/agentex/lib/cli/utils/cli_utils.py
new file mode 100644
index 000000000..46be43759
--- /dev/null
+++ b/src/agentex/lib/cli/utils/cli_utils.py
@@ -0,0 +1,14 @@
+import typer
+from rich.console import Console
+
+console = Console()
+
+
+def handle_questionary_cancellation(
+ result: str | None, operation: str = "operation"
+) -> str:
+ """Handle questionary cancellation by checking for None and exiting gracefully"""
+ if result is None:
+ console.print(f"[yellow]{operation.capitalize()} cancelled by user[/yellow]")
+ raise typer.Exit(0)
+ return result
diff --git a/src/agentex/lib/cli/utils/credential_utils.py b/src/agentex/lib/cli/utils/credential_utils.py
new file mode 100644
index 000000000..fae4dea6d
--- /dev/null
+++ b/src/agentex/lib/cli/utils/credential_utils.py
@@ -0,0 +1,103 @@
+import subprocess
+
+from rich.console import Console
+from rich.prompt import Confirm, Prompt
+
+from agentex.lib.types.credentials import CredentialMapping
+
+console = Console()
+
+
+def check_secret_exists(secret_name: str, namespace: str) -> bool:
+ """Check if a Kubernetes secret exists in the given namespace."""
+ try:
+ result = subprocess.run(
+ ["kubectl", "get", "secret", secret_name, "-n", namespace],
+ capture_output=True,
+ text=True,
+ check=False,
+ )
+ return result.returncode == 0
+ except Exception:
+ return False
+
+
+def create_env_var_secret(credential: CredentialMapping, namespace: str) -> bool:
+ """Create a generic secret for environment variable credentials."""
+ console.print(
+ f"[yellow]Secret '{credential.secret_name}' not found in namespace '{namespace}'[/yellow]"
+ )
+
+ if not Confirm.ask(
+ f"Would you like to create the secret '{credential.secret_name}'?"
+ ):
+ return False
+
+ # Prompt for the secret value
+ secret_value = Prompt.ask(
+ f"Enter the value for '{credential.secret_key}'", password=True
+ )
+
+ try:
+ # Create the secret using kubectl
+ subprocess.run(
+ [
+ "kubectl",
+ "create",
+ "secret",
+ "generic",
+ credential.secret_name,
+ f"--from-literal={credential.secret_key}={secret_value}",
+ "-n",
+ namespace,
+ ],
+ capture_output=True,
+ text=True,
+ check=True,
+ )
+
+ console.print(
+ f"[green]✓ Created secret '{credential.secret_name}' in namespace '{namespace}'[/green]"
+ )
+ return True
+
+ except subprocess.CalledProcessError as e:
+ console.print(f"[red]✗ Failed to create secret: {e.stderr}[/red]")
+ return False
+
+
+# def create_image_pull_secret(credential: ImagePullCredential, namespace: str) -> bool:
+# """Create an image pull secret with interactive prompts."""
+# console.print(f"[yellow]Image pull secret '{credential.secret_name}' not found in namespace '{namespace}'[/yellow]")
+
+# if not Confirm.ask(f"Would you like to create the image pull secret '{credential.secret_name}'?"):
+# return False
+
+# # Prompt for registry details
+# registry_server = Prompt.ask("Docker registry server (e.g., docker.io, gcr.io)")
+# username = Prompt.ask("Username")
+# password = Prompt.ask("Password", password=True)
+# email = Prompt.ask("Email (optional)", default="")
+
+# try:
+# # Create the image pull secret using kubectl
+# cmd = [
+# "kubectl", "create", "secret", "docker-registry",
+# credential.secret_name,
+# f"--docker-server={registry_server}",
+# f"--docker-username={username}",
+# f"--docker-password={password}",
+# "-n", namespace
+# ]
+
+# if email:
+# cmd.append(f"--docker-email={email}")
+
+# result = subprocess.run(cmd, capture_output=True, text=True, check=True)
+
+# console.print(f"[green]✓ Created image pull secret '{credential.secret_name}' in namespace '{namespace}'[/green]")
+# return True
+
+# except subprocess.CalledProcessError as e:
+# console.print(f"[red]✗ Failed to create image pull secret: {e.stderr}[/red]")
+# return False
diff --git a/src/agentex/lib/cli/utils/exceptions.py b/src/agentex/lib/cli/utils/exceptions.py
new file mode 100644
index 000000000..efd41b6c5
--- /dev/null
+++ b/src/agentex/lib/cli/utils/exceptions.py
@@ -0,0 +1,6 @@
+class HelmError(Exception):
+ """An error occurred during helm operations"""
+
+
+class DeploymentError(Exception):
+ """An error occurred during deployment"""
diff --git a/src/agentex/lib/cli/utils/kubectl_utils.py b/src/agentex/lib/cli/utils/kubectl_utils.py
new file mode 100644
index 000000000..abdcf2bf7
--- /dev/null
+++ b/src/agentex/lib/cli/utils/kubectl_utils.py
@@ -0,0 +1,135 @@
+import subprocess
+
+from kubernetes import client, config
+from kubernetes.client.rest import ApiException
+from rich.console import Console
+
+from agentex.lib.cli.utils.exceptions import DeploymentError
+from agentex.lib.utils.logging import make_logger
+
+logger = make_logger(__name__)
+console = Console()
+
+
+class KubernetesClientManager:
+ """Manages Kubernetes clients for different contexts"""
+
+ def __init__(self):
+ self._clients: dict[str, client.CoreV1Api] = {}
+
+ def get_client(self, context: str | None = None) -> client.CoreV1Api:
+ """Get a Kubernetes client for the specified context"""
+ if context is None:
+ context = get_current_context()
+
+ if context not in self._clients:
+ try:
+ # Load config for specific context
+ config.load_kube_config(context=context)
+ self._clients[context] = client.CoreV1Api()
+ logger.info(f"Created Kubernetes client for context: {context}")
+ except Exception as e:
+ raise DeploymentError(
+ f"Failed to create Kubernetes client for context '{context}': {e}"
+ ) from e
+
+ return self._clients[context]
+
+ def clear_cache(self):
+ """Clear cached clients (useful when contexts change)"""
+ self._clients.clear()
+
+
+def get_current_context() -> str:
+ """Get the current kubectl context"""
+ try:
+ contexts, active_context = config.list_kube_config_contexts()
+ if active_context is None:
+ raise DeploymentError("No active kubectl context found")
+ return active_context["name"]
+ except Exception as e:
+ raise DeploymentError(f"Failed to get current kubectl context: {e}") from e
+
+
+# Global client manager instance
+_client_manager = KubernetesClientManager()
+
+
+def list_available_contexts() -> list[str]:
+ """List all available kubectl contexts"""
+ try:
+ contexts, _ = config.list_kube_config_contexts()
+ return [ctx["name"] for ctx in contexts]
+ except Exception as e:
+ raise DeploymentError(f"Failed to list kubectl contexts: {e}") from e
+
+
+def validate_cluster_context(cluster_name: str) -> bool:
+ """Check if a cluster name corresponds to an available kubectl context"""
+ try:
+ available_contexts = list_available_contexts()
+ return cluster_name in available_contexts
+ except DeploymentError:
+ return False
+
+
+def switch_kubectl_context(cluster_name: str) -> None:
+ """Switch to the specified kubectl context"""
+ try:
+ # Use subprocess for context switching as it's a local kubeconfig operation
+ subprocess.run(
+ ["kubectl", "config", "use-context", cluster_name],
+ capture_output=True,
+ text=True,
+ check=True,
+ )
+ # Clear client cache since context changed
+ _client_manager.clear_cache()
+ logger.info(f"Switched to kubectl context: {cluster_name}")
+ except (subprocess.CalledProcessError, FileNotFoundError) as e:
+ raise DeploymentError(
+ f"Failed to switch to kubectl context '{cluster_name}': {e}"
+ ) from e
+
+
+def validate_namespace(namespace: str, context: str | None = None) -> bool:
+ """Check if a namespace exists in the specified cluster context"""
+ try:
+ k8s_client = _client_manager.get_client(context)
+ k8s_client.read_namespace(name=namespace)
+ return True
+ except ApiException as e:
+ if e.status == 404:
+ return False
+ raise DeploymentError(f"Failed to validate namespace '{namespace}': {e}") from e
+ except Exception as e:
+ raise DeploymentError(f"Failed to validate namespace '{namespace}': {e}") from e
+
+
+def check_and_switch_cluster_context(cluster_name: str) -> None:
+ """Check and switch to the specified kubectl context"""
+ # Validate cluster context
+ if not validate_cluster_context(cluster_name):
+ available_contexts = list_available_contexts()
+ raise DeploymentError(
+ f"Cluster '{cluster_name}' not found in kubectl contexts.\n"
+ f"Available contexts: {', '.join(available_contexts)}\n"
+ f"Please ensure you have a valid kubeconfig for this cluster."
+ )
+
+ # Switch to the specified cluster context
+ current_context = get_current_context()
+ if current_context != cluster_name:
+ console.print(
+ f"[blue]ℹ[/blue] Switching from context '{current_context}' to '{cluster_name}'"
+ )
+ switch_kubectl_context(cluster_name)
+ else:
+ console.print(
+ f"[blue]ℹ[/blue] Using current kubectl context: [bold]{cluster_name}[/bold]"
+ )
+
+
+def get_k8s_client(context: str | None = None) -> client.CoreV1Api:
+ """Get a Kubernetes client for the specified context (or current context if None)"""
+ return _client_manager.get_client(context)
diff --git a/src/agentex/lib/cli/utils/kubernetes_secrets_utils.py b/src/agentex/lib/cli/utils/kubernetes_secrets_utils.py
new file mode 100644
index 000000000..12a843bf8
--- /dev/null
+++ b/src/agentex/lib/cli/utils/kubernetes_secrets_utils.py
@@ -0,0 +1,185 @@
+import base64
+
+from kubernetes import client
+from kubernetes.client.rest import ApiException
+from rich.console import Console
+
+from agentex.lib.cli.utils.kubectl_utils import get_k8s_client
+from agentex.lib.utils.logging import make_logger
+
+logger = make_logger(__name__)
+console = Console()
+
+# Kubernetes built-in secret `type` values used by this module.
+KUBERNETES_SECRET_TYPE_OPAQUE = "Opaque"
+KUBERNETES_SECRET_TYPE_DOCKERCONFIGJSON = "kubernetes.io/dockerconfigjson"
+KUBERNETES_SECRET_TYPE_BASIC_AUTH = "kubernetes.io/basic-auth"
+KUBERNETES_SECRET_TYPE_TLS = "kubernetes.io/tls"
+
+# Secret types considered valid by this CLI.
+VALID_SECRET_TYPES = [
+    KUBERNETES_SECRET_TYPE_OPAQUE,
+    KUBERNETES_SECRET_TYPE_DOCKERCONFIGJSON,
+    KUBERNETES_SECRET_TYPE_BASIC_AUTH,
+    KUBERNETES_SECRET_TYPE_TLS,
+]
+
+# Maps a secret type to a manifest key — presumably the key under which the
+# secret is referenced in an agent manifest; confirm against the manifest schema.
+KUBERNETES_SECRET_TO_MANIFEST_KEY = {
+    KUBERNETES_SECRET_TYPE_OPAQUE: "credentials",
+    KUBERNETES_SECRET_TYPE_DOCKERCONFIGJSON: "imagePullSecrets",
+}
+
+
+def _create_secret_object(
+    name: str, data: dict[str, str], secret_type: str = KUBERNETES_SECRET_TYPE_OPAQUE
+) -> client.V1Secret:
+    """Helper to create a V1Secret object with multiple key-value pairs.
+
+    Args:
+        name: Secret name (metadata.name).
+        data: Plain-text key/value pairs; sent via `string_data` so the API
+            server base64-encodes them — no manual encoding needed here.
+        secret_type: Kubernetes secret type; defaults to "Opaque".
+    """
+    return client.V1Secret(
+        metadata=client.V1ObjectMeta(name=name),
+        type=secret_type,
+        string_data=data,  # Use string_data for automatic base64 encoding
+    )
+
+
+def create_secret_with_data(
+    name: str, data: dict[str, str], namespace: str, context: str | None = None
+) -> None:
+    """Create a new Kubernetes secret with multiple key-value pairs.
+
+    If a secret of the same name already exists, an error is printed and the
+    function returns WITHOUT raising (create-only semantics; use
+    update_secret_with_data for upsert behavior).
+
+    Raises:
+        ApiException: If the existence check fails with a non-404 error.
+        RuntimeError: If the create call itself fails.
+    """
+    v1 = get_k8s_client(context)
+
+    try:
+        # Check if secret exists
+        v1.read_namespaced_secret(name=name, namespace=namespace)
+        console.print(
+            f"[red]Error: Secret '{name}' already exists in namespace '{namespace}'[/red]"
+        )
+        return
+    except ApiException as e:
+        if e.status != 404:  # If error is not "Not Found"
+            raise
+
+    # Create the secret (404 above means it does not exist yet).
+    secret = _create_secret_object(name, data)
+
+    try:
+        v1.create_namespaced_secret(namespace=namespace, body=secret)
+        console.print(
+            f"[green]Created secret '{name}' in namespace '{namespace}' with {len(data)} keys[/green]"
+        )
+    except ApiException as e:
+        console.print(f"[red]Error creating secret: {e.reason}[/red]")
+        raise RuntimeError(f"Failed to create secret: {str(e)}") from e
+
+
+def update_secret_with_data(
+    name: str, data: dict[str, str], namespace: str, context: str | None = None
+) -> None:
+    """Create or update a Kubernetes secret with multiple key-value pairs.
+
+    Upsert semantics: tries replace first and falls back to create on 404.
+    Note `replace` overwrites the whole secret — keys absent from `data`
+    are dropped, not merged.
+
+    Raises:
+        RuntimeError: If either the replace or the fallback create fails.
+    """
+    v1 = get_k8s_client(context)
+    secret = _create_secret_object(name, data)
+
+    try:
+        # Try to update first
+        v1.replace_namespaced_secret(name=name, namespace=namespace, body=secret)
+        console.print(
+            f"[green]Updated secret '{name}' in namespace '{namespace}' with {len(data)} keys[/green]"
+        )
+    except ApiException as e:
+        if e.status == 404:
+            # Secret doesn't exist, create it
+            try:
+                v1.create_namespaced_secret(namespace=namespace, body=secret)
+                console.print(
+                    f"[green]Created secret '{name}' in namespace '{namespace}' with {len(data)} keys[/green]"
+                )
+            except ApiException as create_error:
+                console.print(
+                    f"[red]Error creating secret: {create_error.reason}[/red]"
+                )
+                raise RuntimeError(
+                    f"Failed to create secret: {str(create_error)}"
+                ) from create_error
+        else:
+            console.print(f"[red]Error updating secret: {e.reason}[/red]")
+            raise RuntimeError(f"Failed to update secret: {str(e)}") from e
+
+
+def create_image_pull_secret_with_data(
+    name: str, data: dict[str, str], namespace: str, context: str | None = None
+) -> None:
+    """Create a new Kubernetes image pull secret with dockerconfigjson type.
+
+    Same create-only semantics as create_secret_with_data: prints an error and
+    returns (without raising) if the secret already exists.
+
+    Raises:
+        ApiException: If the existence check fails with a non-404 error.
+        RuntimeError: If the create call itself fails.
+    """
+    v1 = get_k8s_client(context)
+
+    try:
+        # Check if secret exists
+        v1.read_namespaced_secret(name=name, namespace=namespace)
+        console.print(
+            f"[red]Error: Secret '{name}' already exists in namespace '{namespace}'[/red]"
+        )
+        return
+    except ApiException as e:
+        if e.status != 404:  # If error is not "Not Found"
+            raise
+
+    # Create the secret with dockerconfigjson type
+    secret = _create_secret_object(name, data, KUBERNETES_SECRET_TYPE_DOCKERCONFIGJSON)
+
+    try:
+        v1.create_namespaced_secret(namespace=namespace, body=secret)
+        console.print(
+            f"[green]Created image pull secret '{name}' in namespace '{namespace}' with {len(data)} keys[/green]"
+        )
+    except ApiException as e:
+        console.print(f"[red]Error creating image pull secret: {e.reason}[/red]")
+        raise RuntimeError(f"Failed to create image pull secret: {str(e)}") from e
+
+
+def update_image_pull_secret_with_data(
+    name: str, data: dict[str, str], namespace: str, context: str | None = None
+) -> None:
+    """Create or update a Kubernetes image pull secret with dockerconfigjson type.
+
+    Upsert semantics mirroring update_secret_with_data: replace first, create
+    on 404. `replace` overwrites the whole secret rather than merging keys.
+
+    Raises:
+        RuntimeError: If either the replace or the fallback create fails.
+    """
+    v1 = get_k8s_client(context)
+    secret = _create_secret_object(name, data, KUBERNETES_SECRET_TYPE_DOCKERCONFIGJSON)
+
+    try:
+        # Try to update first
+        v1.replace_namespaced_secret(name=name, namespace=namespace, body=secret)
+        console.print(
+            f"[green]Updated image pull secret '{name}' in namespace '{namespace}' with {len(data)} keys[/green]"
+        )
+    except ApiException as e:
+        if e.status == 404:
+            # Secret doesn't exist, create it
+            try:
+                v1.create_namespaced_secret(namespace=namespace, body=secret)
+                console.print(
+                    f"[green]Created image pull secret '{name}' in namespace '{namespace}' with {len(data)} keys[/green]"
+                )
+            except ApiException as create_error:
+                console.print(
+                    f"[red]Error creating image pull secret: {create_error.reason}[/red]"
+                )
+                raise RuntimeError(
+                    f"Failed to create image pull secret: {str(create_error)}"
+                ) from create_error
+        else:
+            console.print(f"[red]Error updating image pull secret: {e.reason}[/red]")
+            raise RuntimeError(f"Failed to update image pull secret: {str(e)}") from e
+
+
+def get_secret_data(
+    name: str, namespace: str, context: str | None = None
+) -> dict[str, str]:
+    """Get the actual (decoded) data from a secret.
+
+    Returns:
+        The secret's key/value pairs with values base64-decoded to UTF-8
+        strings. Returns {} when the secret does not exist (404) or has no
+        data — callers cannot distinguish those two cases.
+
+    Raises:
+        RuntimeError: On any API failure other than 404.
+    """
+    v1 = get_k8s_client(context)
+    try:
+        secret = v1.read_namespaced_secret(name=name, namespace=namespace)
+        if secret.data:
+            # The API returns `data` base64-encoded; decode for callers.
+            return {
+                key: base64.b64decode(value).decode("utf-8")
+                for key, value in secret.data.items()
+            }
+        return {}
+    except ApiException as e:
+        if e.status == 404:
+            return {}
+        raise RuntimeError(f"Failed to get secret data: {str(e)}") from e
diff --git a/src/agentex/lib/core/__init__.py b/src/agentex/lib/core/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/agentex/lib/core/adapters/__init__.py b/src/agentex/lib/core/adapters/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/agentex/lib/core/adapters/llm/__init__.py b/src/agentex/lib/core/adapters/llm/__init__.py
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/agentex/lib/core/adapters/llm/__init__.py
@@ -0,0 +1 @@
+
diff --git a/src/agentex/lib/core/adapters/llm/adapter_litellm.py b/src/agentex/lib/core/adapters/llm/adapter_litellm.py
new file mode 100644
index 000000000..cbd69585e
--- /dev/null
+++ b/src/agentex/lib/core/adapters/llm/adapter_litellm.py
@@ -0,0 +1,46 @@
+from collections.abc import AsyncGenerator, Generator
+
+import litellm as llm
+
+from agentex.lib.core.adapters.llm.port import LLMGateway
+from agentex.lib.types.llm_messages import Completion
+from agentex.lib.utils.logging import make_logger
+
+logger = make_logger(__name__)
+
+
+class LiteLLMGateway(LLMGateway):
+    """LLMGateway implementation backed by LiteLLM.
+
+    Non-streaming calls return a single validated ``Completion``; the
+    ``*_stream`` variants yield one ``Completion`` per chunk.
+    """
+
+    def completion(self, *args, **kwargs) -> Completion:
+        """Run a synchronous non-streaming completion via ``litellm.completion``."""
+        # NOTE(review): "stream" defaults to True in this check, so calling
+        # completion() WITHOUT stream=False raises ValueError — confirm that
+        # forcing an explicit stream=False is intended.
+        if kwargs.get("stream", True):
+            raise ValueError(
+                "Please use self.completion_stream instead of self.completion to stream responses"
+            )
+
+        response = llm.completion(*args, **kwargs)
+        return Completion.model_validate(response)
+
+    def completion_stream(self, *args, **kwargs) -> Generator[Completion, None, None]:
+        """Yield completion chunks synchronously; requires stream=True in kwargs."""
+        if not kwargs.get("stream"):
+            raise ValueError("To use streaming, please set stream=True in the kwargs")
+
+        for chunk in llm.completion(*args, **kwargs):
+            yield Completion.model_validate(chunk)
+
+    async def acompletion(self, *args, **kwargs) -> Completion:
+        """Run an async non-streaming completion via ``litellm.acompletion``."""
+        # NOTE(review): same stream-defaults-to-True behavior as completion().
+        if kwargs.get("stream", True):
+            raise ValueError(
+                "Please use self.acompletion_stream instead of self.acompletion to stream responses"
+            )
+
+        # Return a single completion for non-streaming
+        response = await llm.acompletion(*args, **kwargs)
+        return Completion.model_validate(response)
+
+    async def acompletion_stream(
+        self, *args, **kwargs
+    ) -> AsyncGenerator[Completion, None]:
+        """Asynchronously yield completion chunks; requires stream=True in kwargs."""
+        if not kwargs.get("stream"):
+            raise ValueError("To use streaming, please set stream=True in the kwargs")
+
+        async for chunk in await llm.acompletion(*args, **kwargs):
+            yield Completion.model_validate(chunk)
diff --git a/src/agentex/lib/core/adapters/llm/adapter_sgp.py b/src/agentex/lib/core/adapters/llm/adapter_sgp.py
new file mode 100644
index 000000000..a14e66a29
--- /dev/null
+++ b/src/agentex/lib/core/adapters/llm/adapter_sgp.py
@@ -0,0 +1,55 @@
+import os
+from collections.abc import AsyncGenerator, Generator
+
+from scale_gp import AsyncSGPClient, SGPClient
+
+from agentex.lib.core.adapters.llm.port import LLMGateway
+from agentex.lib.types.llm_messages import Completion
+from agentex.lib.utils.logging import make_logger
+
+logger = make_logger(__name__)
+
+
+class SGPLLMGateway(LLMGateway):
+    """LLMGateway implementation backed by the Scale GP (SGP) client.
+
+    Mirrors LiteLLMGateway's contract: non-streaming calls return one
+    validated ``Completion``; ``*_stream`` variants yield chunks.
+    """
+
+    def __init__(self, sgp_api_key: str | None = None):
+        # NOTE(review): os.environ.get("SGP_API_KEY", sgp_api_key) makes the
+        # environment variable take precedence over the explicit argument —
+        # confirm callers expect the env var to win over a passed key.
+        self.sync_client = SGPClient(api_key=os.environ.get("SGP_API_KEY", sgp_api_key))
+        self.async_client = AsyncSGPClient(
+            api_key=os.environ.get("SGP_API_KEY", sgp_api_key)
+        )
+
+    def completion(self, *args, **kwargs) -> Completion:
+        """Run a synchronous non-streaming completion via the SGP client."""
+        # NOTE(review): "stream" defaults to True here, so callers must pass
+        # stream=False explicitly or this raises (same pattern as LiteLLMGateway).
+        if kwargs.get("stream", True):
+            raise ValueError(
+                "Please use self.completion_stream instead of self.completion to stream responses"
+            )
+
+        response = self.sync_client.beta.chat.completions.create(*args, **kwargs)
+        return Completion.model_validate(response)
+
+    def completion_stream(self, *args, **kwargs) -> Generator[Completion, None, None]:
+        """Yield completion chunks synchronously; requires stream=True in kwargs."""
+        if not kwargs.get("stream"):
+            raise ValueError("To use streaming, please set stream=True in the kwargs")
+
+        for chunk in self.sync_client.beta.chat.completions.create(*args, **kwargs):
+            yield Completion.model_validate(chunk)
+
+    async def acompletion(self, *args, **kwargs) -> Completion:
+        """Run an async non-streaming completion via the SGP client."""
+        if kwargs.get("stream", True):
+            raise ValueError(
+                "Please use self.acompletion_stream instead of self.acompletion to stream responses"
+            )
+
+        # Return a single completion for non-streaming
+        response = await self.async_client.beta.chat.completions.create(*args, **kwargs)
+        return Completion.model_validate(response)
+
+    async def acompletion_stream(
+        self, *args, **kwargs
+    ) -> AsyncGenerator[Completion, None]:
+        """Asynchronously yield completion chunks; requires stream=True in kwargs."""
+        if not kwargs.get("stream"):
+            raise ValueError("To use streaming, please set stream=True in the kwargs")
+
+        async for chunk in await self.async_client.beta.chat.completions.create(
+            *args, **kwargs
+        ):
+            yield Completion.model_validate(chunk)
diff --git a/src/agentex/lib/core/adapters/llm/port.py b/src/agentex/lib/core/adapters/llm/port.py
new file mode 100644
index 000000000..f9eeebeb0
--- /dev/null
+++ b/src/agentex/lib/core/adapters/llm/port.py
@@ -0,0 +1,24 @@
+from abc import ABC, abstractmethod
+from collections.abc import AsyncGenerator, Generator
+
+from agentex.lib.types.llm_messages import Completion
+
+
+class LLMGateway(ABC):
+    """Abstract port for LLM providers.
+
+    Concrete adapters implement the four call shapes: sync/async crossed
+    with non-streaming (single ``Completion``) and streaming (generator of
+    ``Completion`` chunks).
+    """
+
+    @abstractmethod
+    def completion(self, *args, **kwargs) -> Completion:
+        """Synchronous non-streaming completion."""
+        raise NotImplementedError
+
+    @abstractmethod
+    def completion_stream(self, *args, **kwargs) -> Generator[Completion, None, None]:
+        """Synchronous streaming completion; yields one Completion per chunk."""
+        raise NotImplementedError
+
+    @abstractmethod
+    async def acompletion(self, *args, **kwargs) -> Completion:
+        """Asynchronous non-streaming completion."""
+        raise NotImplementedError
+
+    @abstractmethod
+    async def acompletion_stream(
+        self, *args, **kwargs
+    ) -> AsyncGenerator[Completion, None]:
+        """Asynchronous streaming completion; yields one Completion per chunk."""
+        raise NotImplementedError
diff --git a/src/agentex/lib/core/adapters/streams/adapter_redis.py b/src/agentex/lib/core/adapters/streams/adapter_redis.py
new file mode 100644
index 000000000..83ac9c396
--- /dev/null
+++ b/src/agentex/lib/core/adapters/streams/adapter_redis.py
@@ -0,0 +1,128 @@
+import asyncio
+import json
+import os
+from collections.abc import AsyncIterator
+from typing import Annotated, Any
+
+import redis.asyncio as redis
+from fastapi import Depends
+
+from agentex.lib.core.adapters.streams.port import EventStreamRepository
+from agentex.lib.utils.logging import make_logger
+
+logger = make_logger(__name__)
+
+
+class RedisEventStreamRepository(EventStreamRepository):
+    """
+    A simplified Redis implementation of the EventStreamRepository interface.
+    Optimized for text/JSON streaming with SSE.
+    """
+
+    def __init__(self, redis_url: str | None = None):
+        # Get Redis URL from environment if not provided; falls back to a
+        # local default so development works without configuration.
+        self.redis_url = redis_url or os.environ.get(
+            "REDIS_URL", "redis://localhost:6379"
+        )
+        self.redis = redis.from_url(self.redis_url)
+
+    async def send_event(self, topic: str, event: dict[str, Any]) -> str:
+        """
+        Send an event to a Redis stream.
+
+        Args:
+            topic: The stream topic/name
+            event: The event data (will be JSON serialized)
+
+        Returns:
+            The message ID from Redis.
+            NOTE(review): redis-py may return the ID as bytes unless the
+            client was created with decode_responses=True — confirm the
+            str annotation against how the return value is used.
+        """
+        try:
+            # Simple JSON serialization
+            event_json = json.dumps(event)
+
+            # # Uncomment to debug
+            # logger.info(f"Sending event to Redis stream {topic}: {event_json}")
+
+            # Add to Redis stream. No maxlen is passed, so the stream grows
+            # unbounded until cleanup_stream deletes it.
+            message_id = await self.redis.xadd(
+                name=topic,
+                fields={"data": event_json},
+            )
+
+            return message_id
+        except Exception as e:
+            logger.error(f"Error publishing to Redis stream {topic}: {e}")
+            raise
+
+    async def subscribe(
+        self, topic: str, last_id: str = "$"
+    ) -> AsyncIterator[dict[str, Any]]:
+        """
+        Subscribe to a Redis stream and yield events as they come in.
+
+        This loop never terminates on its own; the caller is expected to
+        cancel the task or stop consuming the iterator.
+
+        Args:
+            topic: The stream topic to subscribe to
+            last_id: Where to start reading from:
+                     "$" = only new messages (default)
+                     "0" = all messages from the beginning
+                     "<id>" = messages after the specified message ID
+
+        Yields:
+            Parsed event data
+        """
+
+        current_id = last_id
+
+        while True:
+            try:
+                # Read new messages with a reasonable block time
+                streams = {topic: current_id}
+                response = await self.redis.xread(
+                    streams=streams,
+                    count=10, # Get up to 10 messages at a time (reduces overprocessing)
+                    block=2000, # Wait up to 2 seconds for new messages
+                )
+
+                if response:
+                    for _, messages in response:
+                        for message_id, fields in messages:
+                            # Update the last_id for next iteration
+                            current_id = message_id
+
+                            # Extract and parse the JSON data
+                            if b"data" in fields:
+                                try:
+                                    data_str = fields[b"data"].decode("utf-8")
+                                    event = json.loads(data_str)
+                                    yield event
+                                except Exception as e:
+                                    # Malformed payloads are skipped, not fatal.
+                                    logger.warning(
+                                        f"Failed to parse event from Redis stream: {e}"
+                                    )
+
+                # Small sleep to prevent tight loops
+                await asyncio.sleep(0.01)
+
+            except Exception as e:
+                logger.error(f"Error reading from Redis stream: {e}")
+                await asyncio.sleep(1)  # Back off on errors
+
+    async def cleanup_stream(self, topic: str) -> None:
+        """
+        Clean up a Redis stream by deleting the underlying key.
+
+        Args:
+            topic: The stream topic to clean up
+        """
+        try:
+            await self.redis.delete(topic)
+            logger.info(f"Cleaned up Redis stream: {topic}")
+        except Exception as e:
+            logger.error(f"Error cleaning up Redis stream {topic}: {e}")
+            raise
+
+
+# FastAPI dependency alias: injects a RedisEventStreamRepository instance
+# (constructed with default arguments, i.e. REDIS_URL from the environment).
+DRedisEventStreamRepository = Annotated[
+    RedisEventStreamRepository | None, Depends(RedisEventStreamRepository)
+]
diff --git a/src/agentex/lib/core/adapters/streams/port.py b/src/agentex/lib/core/adapters/streams/port.py
new file mode 100644
index 000000000..4fff08f52
--- /dev/null
+++ b/src/agentex/lib/core/adapters/streams/port.py
@@ -0,0 +1,50 @@
+from abc import ABC, abstractmethod
+from collections.abc import AsyncIterator
+from typing import Any
+
+
+class EventStreamRepository(ABC):
+    """
+    Interface for event streaming repositories.
+    Used to publish and subscribe to event streams.
+    """
+
+    @abstractmethod
+    async def send_event(self, topic: str, event: dict[str, Any]) -> str:
+        """
+        Send an event to a stream.
+
+        Args:
+            topic: The stream topic/name
+            event: The event data
+
+        Returns:
+            The message ID or other backend-specific identifier
+        """
+        raise NotImplementedError
+
+    @abstractmethod
+    async def subscribe(
+        self, topic: str, last_id: str = "$"
+    ) -> AsyncIterator[dict[str, Any]]:
+        """
+        Subscribe to a stream and yield events as they come in.
+
+        Args:
+            topic: The stream topic to subscribe to
+            last_id: Where to start reading from ("$" = only new events;
+                     other accepted values are backend-specific)
+
+        Yields:
+            Event data
+        """
+        raise NotImplementedError
+
+    @abstractmethod
+    async def cleanup_stream(self, topic: str) -> None:
+        """
+        Clean up a stream and release its backing resources.
+
+        Args:
+            topic: The stream topic to clean up
+        """
+        raise NotImplementedError
diff --git a/src/agentex/lib/core/clients/__init__.py b/src/agentex/lib/core/clients/__init__.py
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/agentex/lib/core/clients/__init__.py
@@ -0,0 +1 @@
+
diff --git a/src/agentex/lib/core/clients/temporal/__init__.py b/src/agentex/lib/core/clients/temporal/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/agentex/lib/core/clients/temporal/temporal_client.py b/src/agentex/lib/core/clients/temporal/temporal_client.py
new file mode 100644
index 000000000..16137eb7a
--- /dev/null
+++ b/src/agentex/lib/core/clients/temporal/temporal_client.py
@@ -0,0 +1,181 @@
+from collections.abc import Callable
+from datetime import timedelta
+from typing import Any
+
+from temporalio.client import Client, WorkflowExecutionStatus
+from temporalio.common import RetryPolicy as TemporalRetryPolicy
+from temporalio.common import WorkflowIDReusePolicy
+from temporalio.service import RPCError, RPCStatusCode
+
+from agentex.lib.core.clients.temporal.types import (
+ DuplicateWorkflowPolicy,
+ RetryPolicy,
+ TaskStatus,
+ WorkflowState,
+)
+from agentex.lib.core.clients.temporal.utils import get_temporal_client
+from agentex.lib.utils.logging import make_logger
+from agentex.lib.utils.model_utils import BaseModel
+
+logger = make_logger(__name__)
+
+# By default workflows are attempted exactly once (no retries).
+DEFAULT_RETRY_POLICY = RetryPolicy(maximum_attempts=1)
+
+
+# Maps Temporal's workflow execution status to the task-level WorkflowState
+# returned by TemporalClient.get_workflow_status. Both TERMINATED and
+# CANCELED collapse to TaskStatus.CANCELED; TIMED_OUT collapses to FAILED.
+TEMPORAL_STATUS_TO_UPLOAD_STATUS_AND_REASON = {
+    # TODO: Support canceled status
+    WorkflowExecutionStatus.CANCELED: WorkflowState(
+        status=TaskStatus.CANCELED,
+        reason="Task canceled by the user.",
+        is_terminal=True,
+    ),
+    WorkflowExecutionStatus.COMPLETED: WorkflowState(
+        status=TaskStatus.COMPLETED,
+        reason="Task completed successfully.",
+        is_terminal=True,
+    ),
+    WorkflowExecutionStatus.FAILED: WorkflowState(
+        status=TaskStatus.FAILED,
+        reason="Task encountered terminal failure. "
+        "Please contact support if retrying does not resolve the issue.",
+        is_terminal=True,
+    ),
+    WorkflowExecutionStatus.RUNNING: WorkflowState(
+        status=TaskStatus.RUNNING,
+        reason="Task is running.",
+        is_terminal=False,
+    ),
+    WorkflowExecutionStatus.TERMINATED: WorkflowState(
+        status=TaskStatus.CANCELED,
+        reason="Task canceled by the user.",
+        is_terminal=True,
+    ),
+    WorkflowExecutionStatus.TIMED_OUT: WorkflowState(
+        status=TaskStatus.FAILED,
+        reason="Task timed out. Please contact support if retrying does not resolve the issue",
+        is_terminal=True,
+    ),
+    # A continued-as-new workflow is still logically the same running task.
+    WorkflowExecutionStatus.CONTINUED_AS_NEW: WorkflowState(
+        status=TaskStatus.RUNNING,
+        reason="Task is running.",
+        is_terminal=False,
+    ),
+}
+
+# 1:1 bridge from this SDK's DuplicateWorkflowPolicy enum to Temporal's
+# WorkflowIDReusePolicy.
+DUPLICATE_POLICY_TO_ID_REUSE_POLICY = {
+    DuplicateWorkflowPolicy.ALLOW_DUPLICATE: WorkflowIDReusePolicy.ALLOW_DUPLICATE,
+    DuplicateWorkflowPolicy.ALLOW_DUPLICATE_FAILED_ONLY: WorkflowIDReusePolicy.ALLOW_DUPLICATE_FAILED_ONLY,
+    DuplicateWorkflowPolicy.REJECT_DUPLICATE: WorkflowIDReusePolicy.REJECT_DUPLICATE,
+    DuplicateWorkflowPolicy.TERMINATE_IF_RUNNING: WorkflowIDReusePolicy.TERMINATE_IF_RUNNING,
+}
+
+
+class TemporalClient:
+    """Thin async wrapper over temporalio's Client for starting, signaling,
+    querying, and terminating workflows.
+
+    The underlying client may be None when Temporal is disabled (see the
+    sentinel list in `create`); methods other than `create`/`setup` assume
+    a connected client and will fail on None.
+    """
+
+    def __init__(self, temporal_client: Client | None = None):
+        # None means "Temporal disabled"; callers must use create()/setup()
+        # to obtain a connected client.
+        self._client: Client | None = temporal_client
+
+    @classmethod
+    async def create(cls, temporal_address: str) -> "TemporalClient":
+        """Build a TemporalClient, connecting unless the address is a
+        disabled-sentinel value (falsy strings coming from env/config)."""
+        # NOTE(review): this sentinel list is duplicated in
+        # _get_temporal_client — consider a shared helper.
+        if temporal_address in [
+            "false",
+            "False",
+            "null",
+            "None",
+            "",
+            "undefined",
+            False,
+            None,
+        ]:
+            _client = None
+        else:
+            _client = await get_temporal_client(temporal_address)
+        return cls(_client)
+
+    async def setup(self, temporal_address: str) -> None:
+        """(Re)connect this instance to the given Temporal address."""
+        self._client = await self._get_temporal_client(
+            temporal_address=temporal_address
+        )
+
+    async def _get_temporal_client(self, temporal_address: str) -> Client | None:
+        # Same disabled-sentinel handling as create().
+        if temporal_address in [
+            "false",
+            "False",
+            "null",
+            "None",
+            "",
+            "undefined",
+            False,
+            None,
+        ]:
+            return None
+        else:
+            return await get_temporal_client(temporal_address)
+
+    async def start_workflow(
+        self,
+        *args: Any,
+        duplicate_policy: DuplicateWorkflowPolicy = DuplicateWorkflowPolicy.ALLOW_DUPLICATE,
+        retry_policy: RetryPolicy = DEFAULT_RETRY_POLICY,
+        task_timeout: timedelta = timedelta(seconds=10),
+        execution_timeout: timedelta = timedelta(seconds=86400),
+        **kwargs: Any,
+    ) -> str:
+        """Start a workflow and return its workflow ID.
+
+        Positional args/kwargs are forwarded to temporalio's
+        Client.start_workflow; this wrapper fixes retry, timeout, and
+        ID-reuse policy defaults.
+        """
+        temporal_retry_policy = TemporalRetryPolicy(
+            **retry_policy.model_dump(exclude_unset=True)
+        )
+        workflow_handle = await self._client.start_workflow(
+            *args,
+            retry_policy=temporal_retry_policy,
+            task_timeout=task_timeout,
+            execution_timeout=execution_timeout,
+            id_reuse_policy=DUPLICATE_POLICY_TO_ID_REUSE_POLICY[duplicate_policy],
+            **kwargs,
+        )
+        return workflow_handle.id
+
+    async def send_signal(
+        self,
+        workflow_id: str,
+        signal: str | Callable[[dict[str, Any] | list[Any] | str | int | float | bool | BaseModel], Any],
+        payload: dict[str, Any] | list[Any] | str | int | float | bool | BaseModel,
+    ) -> None:
+        """Send a signal (by name or signal function) to a running workflow."""
+        handle = self._client.get_workflow_handle(workflow_id=workflow_id)
+        await handle.signal(signal, payload)
+
+    async def query_workflow(
+        self,
+        workflow_id: str,
+        query: str | Callable[[dict[str, Any] | list[Any] | str | int | float | bool | BaseModel], Any],
+    ) -> Any:
+        """
+        Submit a query to a workflow by name and return the results.
+
+        Args:
+            workflow_id: The ID of the workflow to query
+            query: The name of the query or a callable query function
+
+        Returns:
+            The result of the query
+        """
+        handle = self._client.get_workflow_handle(workflow_id=workflow_id)
+        return await handle.query(query)
+
+    async def get_workflow_status(self, workflow_id: str) -> WorkflowState:
+        """Describe the workflow and map its Temporal status to a WorkflowState.
+
+        A missing workflow is reported as a terminal state rather than raised.
+        """
+        try:
+            handle = self._client.get_workflow_handle(workflow_id=workflow_id)
+            description = await handle.describe()
+            return TEMPORAL_STATUS_TO_UPLOAD_STATUS_AND_REASON[description.status]
+        except RPCError as e:
+            if e.status == RPCStatusCode.NOT_FOUND:
+                # NOTE(review): "NOT_FOUND" is not a TaskStatus member —
+                # confirm downstream consumers handle this literal status.
+                return WorkflowState(
+                    status="NOT_FOUND",
+                    reason="Workflow not found",
+                    is_terminal=True,
+                )
+            raise
+
+    async def terminate_workflow(self, workflow_id: str) -> None:
+        """Forcibly terminate a workflow (no cleanup runs in the workflow)."""
+        return await self._client.get_workflow_handle(workflow_id).terminate()
+
+    async def cancel_workflow(self, workflow_id: str) -> None:
+        """Request cooperative cancellation of a workflow."""
+        return await self._client.get_workflow_handle(workflow_id).cancel()
diff --git a/src/agentex/lib/core/clients/temporal/types.py b/src/agentex/lib/core/clients/temporal/types.py
new file mode 100644
index 000000000..91f3db540
--- /dev/null
+++ b/src/agentex/lib/core/clients/temporal/types.py
@@ -0,0 +1,47 @@
+from datetime import timedelta
+from enum import Enum
+
+from pydantic import Field
+
+from agentex.lib.utils.model_utils import BaseModel
+
+
+class WorkflowState(BaseModel):
+    """Task-level view of a workflow's status.
+
+    status is usually a TaskStatus value but may be a literal string
+    (e.g. "NOT_FOUND"); is_terminal marks states that will not change.
+    """
+
+    status: str
+    is_terminal: bool
+    reason: str | None = None
+
+
+class RetryPolicy(BaseModel):
+    """Pydantic mirror of temporalio's RetryPolicy; field names match so the
+    model can be unpacked directly into temporalio.common.RetryPolicy."""
+
+    initial_interval: timedelta = Field(
+        timedelta(seconds=1),
+        description="Backoff interval for the first retry. Default 1s.",
+    )
+    backoff_coefficient: float = Field(
+        2.0,
+        description="Coefficient to multiply previous backoff interval by to get new interval. Default 2.0.",
+    )
+    maximum_interval: timedelta | None = Field(
+        None,
+        description="Maximum backoff interval between retries. Default 100x :py:attr:`initial_interval`.",
+    )
+    maximum_attempts: int = Field(
+        0,
+        description="Maximum number of attempts. If 0, the default, there is no maximum.",
+    )
+
+
+class DuplicateWorkflowPolicy(str, Enum):
+    """SDK-level mirror of Temporal's WorkflowIDReusePolicy values."""
+
+    ALLOW_DUPLICATE = "ALLOW_DUPLICATE"
+    ALLOW_DUPLICATE_FAILED_ONLY = "ALLOW_DUPLICATE_FAILED_ONLY"
+    REJECT_DUPLICATE = "REJECT_DUPLICATE"
+    TERMINATE_IF_RUNNING = "TERMINATE_IF_RUNNING"
+
+
+class TaskStatus(str, Enum):
+    """Terminal and non-terminal task statuses derived from workflow state."""
+
+    CANCELED = "CANCELED"
+    COMPLETED = "COMPLETED"
+    FAILED = "FAILED"
+    RUNNING = "RUNNING"
+    TERMINATED = "TERMINATED"
+    TIMED_OUT = "TIMED_OUT"
diff --git a/src/agentex/lib/core/clients/temporal/utils.py b/src/agentex/lib/core/clients/temporal/utils.py
new file mode 100644
index 000000000..5e4116766
--- /dev/null
+++ b/src/agentex/lib/core/clients/temporal/utils.py
@@ -0,0 +1,56 @@
+from temporalio.client import Client
+from temporalio.contrib.pydantic import pydantic_data_converter
+from temporalio.runtime import OpenTelemetryConfig, Runtime, TelemetryConfig
+
+# class DateTimeJSONEncoder(AdvancedJSONEncoder):
+# def default(self, o: Any) -> Any:
+# if isinstance(o, datetime.datetime):
+# return o.isoformat()
+# return super().default(o)
+
+
+# class DateTimeJSONTypeConverter(JSONTypeConverter):
+# def to_typed_value(
+# self, hint: Type, value: Any
+# ) -> Union[Optional[Any], _JSONTypeConverterUnhandled]:
+# if hint == datetime.datetime:
+# return datetime.datetime.fromisoformat(value)
+# return JSONTypeConverter.Unhandled
+
+
+# class DateTimePayloadConverter(CompositePayloadConverter):
+# def __init__(self) -> None:
+# json_converter = JSONPlainPayloadConverter(
+# encoder=DateTimeJSONEncoder,
+# custom_type_converters=[DateTimeJSONTypeConverter()],
+# )
+# super().__init__(
+# *[
+# c if not isinstance(c, JSONPlainPayloadConverter) else json_converter
+# for c in DefaultPayloadConverter.default_encoding_payload_converters
+# ]
+# )
+
+
+# custom_data_converter = dataclasses.replace(
+# DataConverter.default,
+# payload_converter_class=DateTimePayloadConverter,
+# )
+
+
+async def get_temporal_client(temporal_address: str, metrics_url: str | None = None) -> Client:
+    """Connect to Temporal at `temporal_address` using the pydantic data converter.
+
+    Args:
+        temporal_address: host:port of the Temporal frontend.
+        metrics_url: Optional OpenTelemetry metrics endpoint; when set, the
+            client is built on a Runtime with OTel telemetry configured.
+
+    Returns:
+        A connected temporalio Client.
+    """
+    if not metrics_url:
+        client = await Client.connect(
+            target_host=temporal_address,
+            # data_converter=custom_data_converter,
+            data_converter=pydantic_data_converter,
+        )
+    else:
+        runtime = Runtime(telemetry=TelemetryConfig(metrics=OpenTelemetryConfig(url=metrics_url)))
+        client = await Client.connect(
+            target_host=temporal_address,
+            # data_converter=custom_data_converter,
+            data_converter=pydantic_data_converter,
+            runtime=runtime,
+        )
+    return client
diff --git a/src/agentex/lib/core/services/__init__.py b/src/agentex/lib/core/services/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/agentex/lib/core/services/adk/__init__.py b/src/agentex/lib/core/services/adk/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/agentex/lib/core/services/adk/acp/__init__.py b/src/agentex/lib/core/services/adk/acp/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/agentex/lib/core/services/adk/acp/acp.py b/src/agentex/lib/core/services/adk/acp/acp.py
new file mode 100644
index 000000000..dc7ad147b
--- /dev/null
+++ b/src/agentex/lib/core/services/adk/acp/acp.py
@@ -0,0 +1,210 @@
+from typing import Any, cast
+
+from agentex import AsyncAgentex
+from agentex.lib.core.tracing.tracer import AsyncTracer
+from agentex.lib.utils.logging import make_logger
+from agentex.lib.utils.temporal import heartbeat_if_in_workflow
+from agentex.types.event import Event
+from agentex.types.task import Task
+from agentex.types.task_message import TaskMessage
+from agentex.types.task_message_content import TaskMessageContent
+from agentex.types.task_message_content_param import TaskMessageContentParam
+
+logger = make_logger(__name__)
+
+
+class ACPService:
+    """Service wrapper around the agent JSON-RPC endpoints (task/create,
+    message/send, event/send, task/cancel).
+
+    Every call is wrapped in a tracer span (input recorded on entry, output
+    on success) and sends a Temporal heartbeat when running inside a workflow.
+    """
+
+    def __init__(
+        self,
+        agentex_client: AsyncAgentex,
+        tracer: AsyncTracer,
+    ):
+        self._agentex_client = agentex_client
+        self._tracer = tracer
+
+    async def task_create(
+        self,
+        name: str | None = None,
+        agent_id: str | None = None,
+        agent_name: str | None = None,
+        params: dict[str, Any] | None = None,
+        trace_id: str | None = None,
+        parent_span_id: str | None = None,
+    ) -> Task:
+        """Create a task via the agent's `task/create` RPC.
+
+        Exactly one of agent_name / agent_id must be provided; agent_name
+        takes precedence when both are set. Raises ValueError if neither is.
+        """
+        trace = self._tracer.trace(trace_id=trace_id)
+        async with trace.span(
+            parent_id=parent_span_id,
+            name="task_create",
+            input={
+                "name": name,
+                "agent_id": agent_id,
+                "agent_name": agent_name,
+                "params": params,
+            },
+        ) as span:
+            heartbeat_if_in_workflow("task create")
+            if agent_name:
+                json_rpc_response = await self._agentex_client.agents.rpc_by_name(
+                    agent_name=agent_name,
+                    method="task/create",
+                    params={
+                        "name": name,
+                        "params": params,
+                    },
+                )
+            elif agent_id:
+                json_rpc_response = await self._agentex_client.agents.rpc(
+                    agent_id=agent_id,
+                    method="task/create",
+                    params={
+                        "name": name,
+                        "params": params,
+                    },
+                )
+            else:
+                raise ValueError("Either agent_name or agent_id must be provided")
+
+            task_entry = Task.model_validate(json_rpc_response["result"])
+            if span:
+                span.output = task_entry.model_dump()
+            return task_entry
+
+    async def message_send(
+        self,
+        content: TaskMessageContent,
+        agent_id: str | None = None,
+        agent_name: str | None = None,
+        task_id: str | None = None,
+        task_name: str | None = None,
+        trace_id: str | None = None,
+        parent_span_id: str | None = None,
+    ) -> TaskMessage:
+        """Send a non-streaming message via the agent's `message/send` RPC.
+
+        NOTE(review): task_name is accepted and traced but never forwarded —
+        both RPC branches send only task_id. Confirm whether task_name
+        addressing is supported by this method.
+        """
+        trace = self._tracer.trace(trace_id=trace_id)
+        async with trace.span(
+            parent_id=parent_span_id,
+            name="message_send",
+            input={
+                "agent_id": agent_id,
+                "agent_name": agent_name,
+                "task_id": task_id,
+                "task_name": task_name,
+                "message": content,
+            },
+        ) as span:
+            heartbeat_if_in_workflow("message send")
+            if agent_name:
+                json_rpc_response = await self._agentex_client.agents.rpc_by_name(
+                    agent_name=agent_name,
+                    method="message/send",
+                    params={
+                        "task_id": task_id,
+                        "content": cast(TaskMessageContentParam, content.model_dump()),
+                        "stream": False,
+                    },
+                )
+            elif agent_id:
+                json_rpc_response = await self._agentex_client.agents.rpc(
+                    agent_id=agent_id,
+                    method="message/send",
+                    params={
+                        "task_id": task_id,
+                        "content": cast(TaskMessageContentParam, content.model_dump()),
+                        "stream": False,
+                    },
+                )
+            else:
+                raise ValueError("Either agent_name or agent_id must be provided")
+
+            task_message = TaskMessage.model_validate(json_rpc_response["result"])
+            if span:
+                span.output = task_message.model_dump()
+            return task_message
+
+    async def event_send(
+        self,
+        content: TaskMessageContent,
+        agent_id: str | None = None,
+        agent_name: str | None = None,
+        task_id: str | None = None,
+        task_name: str | None = None,
+        trace_id: str | None = None,
+        parent_span_id: str | None = None,
+    ) -> Event:
+        """Send an event via the agent's `event/send` RPC.
+
+        The by-name branch addresses the task by task_name while the by-id
+        branch uses task_id — callers must pass the identifier matching the
+        agent addressing mode they use.
+        """
+        trace = self._tracer.trace(trace_id=trace_id)
+        async with trace.span(
+            parent_id=parent_span_id,
+            name="event_send",
+            input={
+                "agent_id": agent_id,
+                "agent_name": agent_name,
+                "task_id": task_id,
+                "content": content,
+            },
+        ) as span:
+            heartbeat_if_in_workflow("event send")
+            if agent_name:
+                json_rpc_response = await self._agentex_client.agents.rpc_by_name(
+                    agent_name=agent_name,
+                    method="event/send",
+                    params={
+                        "task_name": task_name,
+                        "content": cast(TaskMessageContentParam, content.model_dump()),
+                    },
+                )
+            elif agent_id:
+                json_rpc_response = await self._agentex_client.agents.rpc(
+                    agent_id=agent_id,
+                    method="event/send",
+                    params={
+                        "task_id": task_id,
+                        "content": cast(TaskMessageContentParam, content.model_dump()),
+                    },
+                )
+            else:
+                raise ValueError("Either agent_name or agent_id must be provided")
+
+            event_entry = Event.model_validate(json_rpc_response["result"])
+            if span:
+                span.output = event_entry.model_dump()
+            return event_entry
+
+    async def task_cancel(
+        self,
+        task_id: str | None = None,
+        task_name: str | None = None,
+        trace_id: str | None = None,
+        parent_span_id: str | None = None,
+    ) -> Task:
+        """Cancel a task via the agent's `task/cancel` RPC.
+
+        NOTE(review): this passes task_name as agent_name and task_id as
+        agent_id to the RPC helpers — unlike every other method in this
+        class, which addresses by agent. Confirm this is intentional and
+        not a copy/paste defect.
+        """
+        trace = self._tracer.trace(trace_id=trace_id)
+        async with trace.span(
+            parent_id=parent_span_id,
+            name="task_cancel",
+            input={
+                "task_id": task_id,
+                "task_name": task_name,
+            },
+        ) as span:
+            heartbeat_if_in_workflow("task cancel")
+            if task_name:
+                json_rpc_response = await self._agentex_client.agents.rpc_by_name(
+                    agent_name=task_name,
+                    method="task/cancel",
+                    params={
+                        "task_name": task_name,
+                    },
+                )
+            elif task_id:
+                json_rpc_response = await self._agentex_client.agents.rpc(
+                    agent_id=task_id,
+                    method="task/cancel",
+                    params={
+                        "task_id": task_id,
+                    },
+                )
+            else:
+                raise ValueError("Either task_name or task_id must be provided")
+
+            task_entry = Task.model_validate(json_rpc_response["result"])
+            if span:
+                span.output = task_entry.model_dump()
+            return task_entry
diff --git a/src/agentex/lib/core/services/adk/agent_task_tracker.py b/src/agentex/lib/core/services/adk/agent_task_tracker.py
new file mode 100644
index 000000000..ad9dc42c6
--- /dev/null
+++ b/src/agentex/lib/core/services/adk/agent_task_tracker.py
@@ -0,0 +1,85 @@
+from agentex import AsyncAgentex
+from agentex.lib.core.tracing.tracer import AsyncTracer
+from agentex.lib.utils.logging import make_logger
+from agentex.types.agent_task_tracker import AgentTaskTracker
+
+logger = make_logger(__name__)
+
+
+class AgentTaskTrackerService:
+ def __init__(
+ self, agentex_client: AsyncAgentex, tracer: AsyncTracer,
+ ):
+ self._agentex_client = agentex_client
+ self._tracer = tracer
+
+ async def get_agent_task_tracker(
+ self,
+ tracker_id: str,
+ trace_id: str | None = None,
+ parent_span_id: str | None = None,
+ ) -> AgentTaskTracker:
+ trace = self._tracer.trace(trace_id)
+ async with trace.span(
+ parent_id=parent_span_id,
+ name="get_agent_task_tracker",
+ input={"tracker_id": tracker_id},
+ ) as span:
+ tracker = await self._agentex_client.tracker.retrieve(
+ tracker_id
+ )
+ if span:
+ span.output = tracker.model_dump()
+ return tracker
+
+ async def get_by_task_and_agent(
+ self,
+ task_id: str,
+ agent_id: str,
+ trace_id: str | None = None,
+ parent_span_id: str | None = None,
+ ) -> AgentTaskTracker | None:
+ trace = self._tracer.trace(trace_id)
+ async with trace.span(
+ parent_id=parent_span_id,
+ name="get_by_task_and_agent",
+ input={"task_id": task_id, "agent_id": agent_id},
+ ) as span:
+ trackers = await self._agentex_client.tracker.list(
+ task_id=task_id,
+ agent_id=agent_id,
+ )
+ tracker = trackers[0] if trackers else None
+ if span:
+ span.output = tracker.model_dump() if tracker else None
+ return tracker
+
+ async def update_agent_task_tracker(
+ self,
+ tracker_id: str,
+ last_processed_event_id: str | None = None,
+ status: str | None = None,
+ status_reason: str | None = None,
+ trace_id: str | None = None,
+ parent_span_id: str | None = None,
+ ) -> AgentTaskTracker:
+ trace = self._tracer.trace(trace_id)
+ async with trace.span(
+ parent_id=parent_span_id,
+ name="update_agent_task_tracker",
+ input={
+ "tracker_id": tracker_id,
+ "last_processed_event_id": last_processed_event_id,
+ "status": status,
+ "status_reason": status_reason,
+ },
+ ) as span:
+ tracker = await self._agentex_client.tracker.update(
+ tracker_id=tracker_id,
+ last_processed_event_id=last_processed_event_id,
+ status=status,
+ status_reason=status_reason,
+ )
+ if span:
+ span.output = tracker.model_dump()
+ return tracker
diff --git a/src/agentex/lib/core/services/adk/agents.py b/src/agentex/lib/core/services/adk/agents.py
new file mode 100644
index 000000000..7600c7bee
--- /dev/null
+++ b/src/agentex/lib/core/services/adk/agents.py
@@ -0,0 +1,43 @@
+from typing import Optional
+
+from agentex import AsyncAgentex
+from agentex.lib.core.tracing.tracer import AsyncTracer
+from agentex.types.agent import Agent
+from agentex.lib.utils.logging import make_logger
+from agentex.lib.utils.temporal import heartbeat_if_in_workflow
+
+logger = make_logger(__name__)
+
+
+class AgentsService:
+ def __init__(
+ self,
+ agentex_client: AsyncAgentex,
+ tracer: AsyncTracer,
+ ):
+ self._agentex_client = agentex_client
+ self._tracer = tracer
+
+ async def get_agent(
+ self,
+ agent_id: Optional[str] = None,
+ agent_name: Optional[str] = None,
+ trace_id: Optional[str] = None,
+ parent_span_id: Optional[str] = None,
+ ) -> Agent:
+ trace = self._tracer.trace(trace_id)
+ async with trace.span(
+ parent_id=parent_span_id,
+ name="get_agent",
+ input={"agent_id": agent_id, "agent_name": agent_name},
+ ) as span:
+ heartbeat_if_in_workflow("get agent")
+ if agent_id:
+ agent = await self._agentex_client.agents.retrieve(agent_id=agent_id)
+ elif agent_name:
+ agent = await self._agentex_client.agents.retrieve_by_name(agent_name=agent_name)
+ else:
+ raise ValueError("Either agent_id or agent_name must be provided")
+ if span:
+ span.output = agent.model_dump()
+ return agent
diff --git a/src/agentex/lib/core/services/adk/events.py b/src/agentex/lib/core/services/adk/events.py
new file mode 100644
index 000000000..839946049
--- /dev/null
+++ b/src/agentex/lib/core/services/adk/events.py
@@ -0,0 +1,61 @@
+from agentex import AsyncAgentex
+from agentex.lib.core.tracing.tracer import AsyncTracer
+from agentex.types.event import Event
+from agentex.lib.utils.logging import make_logger
+
+logger = make_logger(__name__)
+
+
+class EventsService:
+    """Tracing wrapper around the events client API.
+
+    NOTE(review): unlike the sibling ADK services, no
+    heartbeat_if_in_workflow() calls are made here — confirm these methods
+    are never invoked from a Temporal activity, or add heartbeats for
+    consistency.
+    """
+
+    def __init__(
+        self, agentex_client: AsyncAgentex, tracer: AsyncTracer
+    ):
+        self._agentex_client = agentex_client
+        self._tracer = tracer
+
+    async def get_event(
+        self,
+        event_id: str,
+        trace_id: str | None = None,
+        parent_span_id: str | None = None,
+    ) -> Event | None:
+        """Fetch a single event by ID, recorded as a span.
+
+        Note: the annotation allows ``None``, but this body always returns
+        the client's result as-is.
+        """
+        trace = self._tracer.trace(trace_id)
+        async with trace.span(
+            parent_id=parent_span_id,
+            name="get_event",
+            input={"event_id": event_id},
+        ) as span:
+            event = await self._agentex_client.events.retrieve(event_id=event_id)
+            if span:
+                span.output = event.model_dump()
+            return event
+
+    async def list_events(
+        self,
+        task_id: str,
+        agent_id: str,
+        last_processed_event_id: str | None = None,
+        limit: int | None = None,
+        trace_id: str | None = None,
+        parent_span_id: str | None = None,
+    ) -> list[Event]:
+        """List events for a task/agent pair.
+
+        Args:
+            task_id: Task whose events to list.
+            agent_id: Agent whose events to list.
+            last_processed_event_id: Presumably a resume cursor — verify
+                server semantics (after vs. including this ID).
+            limit: Maximum number of events to return.
+        """
+        trace = self._tracer.trace(trace_id)
+        async with trace.span(
+            parent_id=parent_span_id,
+            name="list_events",
+            input={
+                "task_id": task_id,
+                "agent_id": agent_id,
+                "last_processed_event_id": last_processed_event_id,
+                "limit": limit,
+            },
+        ) as span:
+            events = await self._agentex_client.events.list(
+                task_id=task_id,
+                agent_id=agent_id,
+                last_processed_event_id=last_processed_event_id,
+                limit=limit,
+            )
+            if span:
+                span.output = [event.model_dump() for event in events]
+            return events
diff --git a/src/agentex/lib/core/services/adk/messages.py b/src/agentex/lib/core/services/adk/messages.py
new file mode 100644
index 000000000..f6f1ce6e9
--- /dev/null
+++ b/src/agentex/lib/core/services/adk/messages.py
@@ -0,0 +1,164 @@
+import asyncio
+from typing import Any, Coroutine, cast
+
+from agentex import AsyncAgentex
+from agentex.lib.core.services.adk.streaming import StreamingService
+from agentex.lib.core.tracing.tracer import AsyncTracer
+from agentex.lib.types.task_message_updates import StreamTaskMessageFull, TaskMessageUpdate
+from agentex.types.task_message import TaskMessage, TaskMessageContent
+from agentex.lib.utils.logging import make_logger
+from agentex.lib.utils.temporal import heartbeat_if_in_workflow
+from agentex.types.task_message_content_param import TaskMessageContentParam
+
+logger = make_logger(__name__)
+
+
+class MessagesService:
+    """Tracing wrapper around task-message CRUD, with optional fan-out of
+    newly created messages to stream subscribers via the StreamingService."""
+
+    def __init__(
+        self,
+        agentex_client: AsyncAgentex,
+        streaming_service: StreamingService,
+        tracer: AsyncTracer,
+    ):
+        self._agentex_client = agentex_client
+        self._streaming_service = streaming_service
+        self._tracer = tracer
+
+    async def create_message(
+        self,
+        task_id: str,
+        content: TaskMessageContent,
+        emit_updates: bool = True,
+        trace_id: str | None = None,
+        parent_span_id: str | None = None,
+    ) -> TaskMessage:
+        """Create one task message; optionally broadcast it as a stream update."""
+        trace = self._tracer.trace(trace_id)
+        async with trace.span(
+            parent_id=parent_span_id,
+            name="create_message",
+            input={"task_id": task_id, "message": content},
+        ) as span:
+            heartbeat_if_in_workflow("create message")
+            task_message = await self._agentex_client.messages.create(
+                task_id=task_id,
+                content=content.model_dump(),
+            )
+            if emit_updates:
+                # Fan the persisted message out as a "full" stream update.
+                await self._emit_updates([task_message])
+            if span:
+                span.output = task_message.model_dump()
+            return task_message
+
+    async def update_message(
+        self,
+        task_id: str,
+        message_id: str,
+        content: TaskMessageContent,
+        trace_id: str | None = None,
+        parent_span_id: str | None = None,
+    ) -> TaskMessage:
+        """Replace the content of an existing message.
+
+        Note: unlike create_message, no stream update is emitted here.
+        """
+        trace = self._tracer.trace(trace_id)
+        async with trace.span(
+            parent_id=parent_span_id,
+            name="update_message",
+            input={
+                "task_id": task_id,
+                "message_id": message_id,
+                "message": content,
+            },
+        ) as span:
+            heartbeat_if_in_workflow("update message")
+            task_message = await self._agentex_client.messages.update(
+                task_id=task_id,
+                message_id=message_id,
+                content=content.model_dump(),
+            )
+            if span:
+                span.output = task_message.model_dump()
+            return task_message
+
+    async def create_messages_batch(
+        self,
+        task_id: str,
+        contents: list[TaskMessageContent],
+        emit_updates: bool = True,
+        trace_id: str | None = None,
+        parent_span_id: str | None = None,
+    ) -> list[TaskMessage]:
+        """Create several messages in one request; optionally broadcast each."""
+        trace = self._tracer.trace(trace_id)
+        async with trace.span(
+            parent_id=parent_span_id,
+            name="create_messages_batch",
+            input={"task_id": task_id, "messages": contents},
+        ) as span:
+            heartbeat_if_in_workflow("create messages batch")
+            task_messages = await self._agentex_client.messages.batch.create(
+                task_id=task_id,
+                contents=[content.model_dump() for content in contents],
+            )
+            if emit_updates:
+                await self._emit_updates(task_messages)
+            if span:
+                span.output = [task_message.model_dump() for task_message in task_messages]
+            return task_messages
+
+    async def update_messages_batch(
+        self,
+        task_id: str,
+        updates: dict[str, TaskMessageContent],
+        trace_id: str | None = None,
+        parent_span_id: str | None = None,
+    ) -> list[TaskMessage]:
+        """Update several messages at once; ``updates`` maps message_id -> content."""
+        trace = self._tracer.trace(trace_id)
+        async with trace.span(
+            parent_id=parent_span_id,
+            name="update_messages_batch",
+            input={"task_id": task_id, "updates": updates},
+        ) as span:
+            heartbeat_if_in_workflow("update messages batch")
+            task_messages = await self._agentex_client.messages.batch.update(
+                task_id=task_id,
+                updates={
+                    message_id: content.model_dump()
+                    for message_id, content in updates.items()
+                },
+            )
+            if span:
+                span.output = [task_message.model_dump() for task_message in task_messages]
+            return task_messages
+
+    async def list_messages(
+        self,
+        task_id: str,
+        limit: int | None = None,
+        trace_id: str | None = None,
+        parent_span_id: str | None = None,
+    ) -> list[TaskMessage]:
+        """List messages for a task, up to ``limit`` if given."""
+        trace = self._tracer.trace(trace_id)
+        async with trace.span(
+            parent_id=parent_span_id,
+            name="list_messages",
+            input={"task_id": task_id, "limit": limit},
+        ) as span:
+            heartbeat_if_in_workflow("list messages")
+            task_messages = await self._agentex_client.messages.list(
+                task_id=task_id,
+                limit=limit,
+            )
+            if span:
+                span.output = [task_message.model_dump() for task_message in task_messages]
+            return task_messages
+
+    async def _emit_updates(self, task_messages: list[TaskMessage]) -> None:
+        # Broadcast each message as a StreamTaskMessageFull, concurrently.
+        stream_update_handlers: list[Coroutine[Any, Any, TaskMessageUpdate | None]] = []
+        for task_message in task_messages:
+            stream_update_handler = self._streaming_service.stream_update(
+                update=StreamTaskMessageFull(
+                    type="full",
+                    parent_task_message=task_message,
+                    content=task_message.content,
+                )
+            )
+            stream_update_handlers.append(stream_update_handler)
+
+        await asyncio.gather(*stream_update_handlers)
diff --git a/src/agentex/lib/core/services/adk/providers/__init__.py b/src/agentex/lib/core/services/adk/providers/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/agentex/lib/core/services/adk/providers/litellm.py b/src/agentex/lib/core/services/adk/providers/litellm.py
new file mode 100644
index 000000000..06999812a
--- /dev/null
+++ b/src/agentex/lib/core/services/adk/providers/litellm.py
@@ -0,0 +1,256 @@
+from collections.abc import AsyncGenerator
+
+from agentex import AsyncAgentex
+from agentex.lib.core.adapters.llm.adapter_litellm import LiteLLMGateway
+from agentex.lib.core.services.adk.streaming import StreamingService
+from agentex.lib.core.tracing.tracer import AsyncTracer
+from agentex.lib.types.llm_messages import (
+ Completion,
+ LLMConfig,
+)
+from agentex.lib.types.task_message_updates import (
+ StreamTaskMessageDelta,
+ StreamTaskMessageFull,
+ TextDelta,
+)
+from agentex.types.task_message import TaskMessage
+from agentex.types.task_message_content import TextContent
+from agentex.lib.utils import logging
+from agentex.lib.utils.completions import concat_completion_chunks
+from agentex.lib.utils.temporal import heartbeat_if_in_workflow
+
+logger = logging.make_logger(__name__)
+
+
+class LiteLLMService:
+ def __init__(
+ self,
+ agentex_client: AsyncAgentex,
+ streaming_service: StreamingService,
+ tracer: AsyncTracer,
+ llm_gateway: LiteLLMGateway | None = None,
+ ):
+ self.agentex_client = agentex_client
+ self.llm_gateway = llm_gateway
+ self.streaming_service = streaming_service
+ self.tracer = tracer
+
+ async def chat_completion(
+ self,
+ llm_config: LLMConfig,
+ trace_id: str | None = None,
+ parent_span_id: str | None = None,
+ ) -> Completion:
+ trace = self.tracer.trace(trace_id)
+ async with trace.span(
+ parent_id=parent_span_id,
+ name="chat_completion",
+ input=llm_config.model_dump(),
+ ) as span:
+ heartbeat_if_in_workflow("chat completion")
+ if self.llm_gateway is None:
+ raise ValueError("LLM Gateway is not set")
+ completion = await self.llm_gateway.acompletion(**llm_config.model_dump())
+ if span:
+ span.output = completion.model_dump()
+ return completion
+
+ async def chat_completion_auto_send(
+ self,
+ task_id: str,
+ llm_config: LLMConfig,
+ trace_id: str | None = None,
+ parent_span_id: str | None = None,
+ ) -> TaskMessage | None:
+ """
+ Chat completion with automatic TaskMessage creation. This does not stream the completion. To stream use chat_completion_stream_auto_send.
+
+ Args:
+ task_id (str): The ID of the task to run the agent for.
+ llm_config (LLMConfig): The configuration for the LLM (must have stream=True).
+
+ Returns:
+ TaskMessage: A TaskMessage object
+ """
+
+ if llm_config.stream:
+ raise ValueError(
+ "LLM config must not have stream=True. To stream use `chat_completion_stream` or `chat_completion_stream_auto_send`."
+ )
+
+ if self.llm_gateway is None:
+ raise ValueError("LLM Gateway is not set")
+
+ trace = self.tracer.trace(trace_id)
+ async with trace.span(
+ parent_id=parent_span_id,
+ name="chat_completion_auto_send",
+ input=llm_config.model_dump(),
+ ) as span:
+ heartbeat_if_in_workflow("chat completion auto send")
+
+ async with self.streaming_service.streaming_task_message_context(
+ task_id=task_id,
+ initial_content=TextContent(
+ author="agent",
+ content="",
+ format="markdown",
+ ),
+ ) as streaming_context:
+ completion = await self.llm_gateway.acompletion(**llm_config.model_dump())
+ if (
+ completion.choices
+ and len(completion.choices) > 0
+ and completion.choices[0].message
+ ):
+ final_content = TextContent(
+ author="agent",
+ content=completion.choices[0].message.content or "",
+ format="markdown",
+ )
+ await streaming_context.stream_update(
+ update=StreamTaskMessageFull(
+ parent_task_message=streaming_context.task_message,
+ content=final_content,
+ ),
+ )
+ else:
+ raise ValueError("No completion message returned from LLM")
+
+ if span:
+ if streaming_context.task_message:
+ span.output = streaming_context.task_message.model_dump()
+ return streaming_context.task_message if streaming_context.task_message else None
+
+ async def chat_completion_stream(
+ self,
+ llm_config: LLMConfig,
+ trace_id: str | None = None,
+ parent_span_id: str | None = None,
+ ) -> AsyncGenerator[Completion, None]:
+ """
+ Stream chat completion chunks using LiteLLM.
+
+ Args:
+ llm_config (LLMConfig): The configuration for the LLM (must have stream=True).
+ trace_id (Optional[str]): The trace ID for tracing.
+ parent_span_id (Optional[str]): The parent span ID for tracing.
+
+ Returns:
+ AsyncGenerator[Completion, None]: Generator yielding completion chunks
+
+ Raises:
+ ValueError: If called from within a Temporal workflow or if stream=False
+ """
+ if not llm_config.stream:
+ raise ValueError("LLM config must have stream=True for streaming")
+
+ if self.llm_gateway is None:
+ raise ValueError("LLM Gateway is not set")
+
+ trace = self.tracer.trace(trace_id)
+ async with trace.span(
+ parent_id=parent_span_id,
+ name="chat_completion_stream",
+ input=llm_config.model_dump(),
+ ) as span:
+ # Direct streaming outside temporal - yield each chunk as it comes
+ chunks: list[Completion] = []
+ async for chunk in self.llm_gateway.acompletion_stream(
+ **llm_config.model_dump()
+ ):
+ chunks.append(chunk)
+ yield chunk
+ if span:
+ span.output = concat_completion_chunks(chunks).model_dump()
+
+ async def chat_completion_stream_auto_send(
+ self,
+ task_id: str,
+ llm_config: LLMConfig,
+ trace_id: str | None = None,
+ parent_span_id: str | None = None,
+ ) -> TaskMessage | None:
+ """
+ Stream chat completion with automatic TaskMessage creation and streaming.
+
+ Args:
+ task_id (str): The ID of the task to run the agent for.
+ llm_config (LLMConfig): The configuration for the LLM (must have stream=True).
+
+ Returns:
+ TaskMessage: A TaskMessage object
+ """
+ heartbeat_if_in_workflow("chat completion stream")
+
+ if self.llm_gateway is None:
+ raise ValueError("LLM Gateway is not set")
+
+ if not llm_config.stream:
+ llm_config.stream = True
+
+ trace = self.tracer.trace(trace_id)
+ async with trace.span(
+ parent_id=parent_span_id,
+ name="chat_completion_stream",
+ input=llm_config.model_dump(),
+ ) as span:
+ # Use streaming context manager
+ async with self.streaming_service.streaming_task_message_context(
+ task_id=task_id,
+ initial_content=TextContent(
+ author="agent",
+ content="",
+ format="markdown",
+ ),
+ ) as streaming_context:
+ # Get the streaming response
+ chunks = []
+ async for response in self.llm_gateway.acompletion_stream(
+ **llm_config.model_dump()
+ ):
+ heartbeat_if_in_workflow("chat completion streaming")
+ if (
+ response.choices
+ and len(response.choices) > 0
+ and response.choices[0].delta
+ ):
+ delta = response.choices[0].delta.content
+ if delta:
+ # Stream the chunk via the context manager
+ await streaming_context.stream_update(
+ update=StreamTaskMessageDelta(
+ parent_task_message=streaming_context.task_message,
+ delta=TextDelta(text_delta=delta),
+ ),
+ )
+ heartbeat_if_in_workflow("content chunk streamed")
+
+ # Store the chunk for final message assembly
+ chunks.append(response)
+
+ # Update the final message content
+ complete_message = concat_completion_chunks(chunks)
+ if (
+ complete_message
+ and complete_message.choices
+ and complete_message.choices[0].message
+ ):
+ final_content = TextContent(
+ author="agent",
+ content=complete_message.choices[0].message.content or "",
+ )
+ await streaming_context.stream_update(
+ update=StreamTaskMessageFull(
+ parent_task_message=streaming_context.task_message,
+ content=final_content,
+ ),
+ )
+
+ heartbeat_if_in_workflow("chat completion stream complete")
+
+ if span:
+ if streaming_context.task_message:
+ span.output = streaming_context.task_message.model_dump()
+
+ return streaming_context.task_message if streaming_context.task_message else None
diff --git a/src/agentex/lib/core/services/adk/providers/openai.py b/src/agentex/lib/core/services/adk/providers/openai.py
new file mode 100644
index 000000000..02604865c
--- /dev/null
+++ b/src/agentex/lib/core/services/adk/providers/openai.py
@@ -0,0 +1,723 @@
+# Standard library imports
+import json
+from contextlib import AsyncExitStack, asynccontextmanager
+from typing import Any, Literal
+
+from agents import Agent, Runner, RunResult, RunResultStreaming
+from agents.agent import StopAtTools, ToolsToFinalOutputFunction
+from agents.mcp import MCPServerStdio
+from mcp import StdioServerParameters
+from openai.types.responses import (
+ ResponseCompletedEvent,
+ ResponseFunctionToolCall,
+ ResponseOutputItemDoneEvent,
+ ResponseTextDeltaEvent,
+)
+from pydantic import BaseModel
+
+# Local imports
+from agentex import AsyncAgentex
+from agentex.lib.core.services.adk.streaming import (
+ StreamingService,
+ StreamingTaskMessageContext,
+)
+from agentex.lib.core.tracing.tracer import AsyncTracer
+from agentex.lib.types.task_message_updates import (
+ StreamTaskMessageDelta,
+ StreamTaskMessageFull,
+ TextDelta,
+)
+from agentex.types.task_message_content import (
+ TextContent,
+ ToolRequestContent,
+ ToolResponseContent,
+)
+from agentex.lib.utils import logging
+from agentex.lib.utils.mcp import redact_mcp_server_params
+from agentex.lib.utils.temporal import heartbeat_if_in_workflow
+
+logger = logging.make_logger(__name__)
+
+
+@asynccontextmanager
+async def mcp_server_context(
+ mcp_server_params: list[StdioServerParameters],
+ mcp_timeout_seconds: int | None = None,
+):
+ """Context manager for MCP servers."""
+ servers = []
+ for params in mcp_server_params:
+ server = MCPServerStdio(
+ name=f"Server: {params.command}",
+ params=params.model_dump(),
+ cache_tools_list=True,
+ client_session_timeout_seconds=mcp_timeout_seconds,
+ )
+ servers.append(server)
+
+ async with AsyncExitStack() as stack:
+ for server in servers:
+ await stack.enter_async_context(server)
+ yield servers
+
+
+class OpenAIService:
+ """Service for OpenAI agent operations using the agents library."""
+
+    def __init__(
+        self,
+        agentex_client: AsyncAgentex | None = None,
+        streaming_service: StreamingService | None = None,
+        tracer: AsyncTracer | None = None,
+    ):
+        """Store optional collaborators.
+
+        NOTE(review): the run_agent* methods call ``self.tracer.trace(...)``
+        without a None check, so a tracer is effectively required — confirm
+        whether ``tracer=None`` is ever a supported configuration.
+        """
+        self.agentex_client = agentex_client
+        self.streaming_service = streaming_service
+        self.tracer = tracer
+
+    async def run_agent(
+        self,
+        input_list: list[dict[str, Any]],
+        mcp_server_params: list[StdioServerParameters],
+        agent_name: str,
+        agent_instructions: str,
+        trace_id: str | None = None,
+        parent_span_id: str | None = None,
+        handoff_description: str | None = None,
+        handoffs: list[BaseModel] | None = None,
+        model: str | None = None,
+        model_settings: BaseModel | None = None,
+        tools: list[BaseModel] | None = None,
+        output_type: type[Any] | None = None,
+        tool_use_behavior: (
+            Literal["run_llm_again", "stop_on_first_tool"]
+            | StopAtTools
+            | ToolsToFinalOutputFunction
+        ) = "run_llm_again",
+        mcp_timeout_seconds: int | None = None,
+    ) -> RunResult:
+        """
+        Run an agent without streaming or TaskMessage creation.
+
+        Args:
+            input_list: List of input data for the agent.
+            mcp_server_params: MCP server parameters for the agent.
+            agent_name: The name of the agent to run.
+            agent_instructions: Instructions for the agent.
+            trace_id: Optional trace ID for tracing.
+            parent_span_id: Optional parent span ID for tracing.
+            handoff_description: Optional description of the handoff.
+            handoffs: Optional list of handoffs.
+            model: Optional model to use.
+            model_settings: Optional model settings.
+            tools: Optional list of tools.
+            output_type: Optional output type.
+            tool_use_behavior: Optional tool use behavior.
+            mcp_timeout_seconds: Optional param to set the timeout threshold for the MCP servers. Defaults to 5 seconds.
+
+        Returns:
+            RunResult: The result of the agent run.
+        """
+        # Strip secrets (env values etc.) before the params reach the span input.
+        redacted_params = redact_mcp_server_params(mcp_server_params)
+
+        # NOTE(review): assumes self.tracer is not None despite the Optional
+        # constructor parameter.
+        trace = self.tracer.trace(trace_id)
+        async with trace.span(
+            parent_id=parent_span_id,
+            name="run_agent",
+            input={
+                "input_list": input_list,
+                "mcp_server_params": redacted_params,
+                "agent_name": agent_name,
+                "agent_instructions": agent_instructions,
+                "handoff_description": handoff_description,
+                "handoffs": handoffs,
+                "model": model,
+                "model_settings": model_settings,
+                "tools": tools,
+                "output_type": output_type,
+                "tool_use_behavior": tool_use_behavior,
+            },
+        ) as span:
+            heartbeat_if_in_workflow("run agent")
+
+            async with mcp_server_context(
+                mcp_server_params, mcp_timeout_seconds
+            ) as servers:
+                # The parameters are rebound here from wrapper models to the
+                # SDK-native tool/agent objects.
+                tools = [tool.to_oai_function_tool() for tool in tools] if tools else []
+                handoffs = (
+                    [Agent(**handoff.model_dump()) for handoff in handoffs]
+                    if handoffs
+                    else []
+                )
+
+                agent_kwargs = {
+                    "name": agent_name,
+                    "instructions": agent_instructions,
+                    "mcp_servers": servers,
+                    "handoff_description": handoff_description,
+                    "handoffs": handoffs,
+                    "model": model,
+                    "tools": tools,
+                    "output_type": output_type,
+                    "tool_use_behavior": tool_use_behavior,
+                }
+                if model_settings is not None:
+                    agent_kwargs["model_settings"] = (
+                        model_settings.to_oai_model_settings()
+                    )
+
+                agent = Agent(**agent_kwargs)
+
+                # Run without streaming
+                result = await Runner.run(starting_agent=agent, input=input_list)
+
+                if span:
+                    # raw_item may be a pydantic model or a plain dict; keep both.
+                    span.output = {
+                        "new_items": [
+                            item.raw_item.model_dump()
+                            if isinstance(item.raw_item, BaseModel)
+                            else item.raw_item
+                            for item in result.new_items
+                        ],
+                        "final_output": result.final_output,
+                    }
+
+                return result
+
+    async def run_agent_auto_send(
+        self,
+        task_id: str,
+        input_list: list[dict[str, Any]],
+        mcp_server_params: list[StdioServerParameters],
+        agent_name: str,
+        agent_instructions: str,
+        trace_id: str | None = None,
+        parent_span_id: str | None = None,
+        handoff_description: str | None = None,
+        handoffs: list[BaseModel] | None = None,
+        model: str | None = None,
+        model_settings: BaseModel | None = None,
+        tools: list[BaseModel] | None = None,
+        output_type: type[Any] | None = None,
+        tool_use_behavior: (
+            Literal["run_llm_again", "stop_on_first_tool"]
+            | StopAtTools
+            | ToolsToFinalOutputFunction
+        ) = "run_llm_again",
+        mcp_timeout_seconds: int | None = None,
+    ) -> RunResult:
+        """
+        Run an agent with automatic TaskMessage creation.
+
+        Args:
+            task_id: The ID of the task to run the agent for.
+            input_list: List of input data for the agent.
+            mcp_server_params: MCP server parameters for the agent.
+            agent_name: The name of the agent to run.
+            agent_instructions: Instructions for the agent.
+            trace_id: Optional trace ID for tracing.
+            parent_span_id: Optional parent span ID for tracing.
+            handoff_description: Optional description of the handoff.
+            handoffs: Optional list of handoffs.
+            model: Optional model to use.
+            model_settings: Optional model settings.
+            tools: Optional list of tools.
+            output_type: Optional output type.
+            tool_use_behavior: Optional tool use behavior.
+            mcp_timeout_seconds: Optional param to set the timeout threshold for the MCP servers. Defaults to 5 seconds.
+
+        Returns:
+            RunResult: The result of the agent run.
+        """
+        # Both collaborators are required on this path, unlike plain run_agent.
+        if self.streaming_service is None:
+            raise ValueError("StreamingService must be available for auto_send methods")
+        if self.agentex_client is None:
+            raise ValueError("Agentex client must be provided for auto_send methods")
+
+        redacted_params = redact_mcp_server_params(mcp_server_params)
+
+        # NOTE(review): assumes self.tracer is not None despite the Optional
+        # constructor parameter.
+        trace = self.tracer.trace(trace_id)
+        async with trace.span(
+            parent_id=parent_span_id,
+            name="run_agent_auto_send",
+            input={
+                "task_id": task_id,
+                "input_list": input_list,
+                "mcp_server_params": redacted_params,
+                "agent_name": agent_name,
+                "agent_instructions": agent_instructions,
+                "handoff_description": handoff_description,
+                "handoffs": handoffs,
+                "model": model,
+                "model_settings": model_settings,
+                "tools": tools,
+                "output_type": output_type,
+                "tool_use_behavior": tool_use_behavior,
+            },
+        ) as span:
+            heartbeat_if_in_workflow("run agent auto send")
+
+            async with mcp_server_context(
+                mcp_server_params, mcp_timeout_seconds
+            ) as servers:
+                # Rebind wrapper models to SDK-native tool/agent objects.
+                tools = [tool.to_oai_function_tool() for tool in tools] if tools else []
+                handoffs = (
+                    [Agent(**handoff.model_dump()) for handoff in handoffs]
+                    if handoffs
+                    else []
+                )
+                agent_kwargs = {
+                    "name": agent_name,
+                    "instructions": agent_instructions,
+                    "mcp_servers": servers,
+                    "handoff_description": handoff_description,
+                    "handoffs": handoffs,
+                    "model": model,
+                    "tools": tools,
+                    "output_type": output_type,
+                    "tool_use_behavior": tool_use_behavior,
+                }
+                if model_settings is not None:
+                    agent_kwargs["model_settings"] = (
+                        model_settings.to_oai_model_settings()
+                    )
+
+                agent = Agent(**agent_kwargs)
+
+                # Run without streaming
+                result = await Runner.run(starting_agent=agent, input=input_list)
+
+                if span:
+                    span.output = {
+                        "new_items": [
+                            item.raw_item.model_dump()
+                            if isinstance(item.raw_item, BaseModel)
+                            else item.raw_item
+                            for item in result.new_items
+                        ],
+                        "final_output": result.final_output,
+                    }
+
+                # Maps call_id -> originating tool call, so tool outputs can be
+                # labeled with the tool's name below.
+                tool_call_map: dict[str, ResponseFunctionToolCall] = {}
+
+                # Mirror each run item into a persisted TaskMessage.
+                for item in result.new_items:
+                    if item.type == "message_output_item":
+                        # NOTE(review): assumes the first content part is the
+                        # text part — confirm multi-part outputs are not
+                        # possible here.
+                        text_content = TextContent(
+                            author="agent",
+                            content=item.raw_item.content[0].text,
+                        )
+                        # Create message for the final result using streaming context
+                        async with (
+                            self.streaming_service.streaming_task_message_context(
+                                task_id=task_id,
+                                initial_content=text_content,
+                            ) as streaming_context
+                        ):
+                            await streaming_context.stream_update(
+                                update=StreamTaskMessageFull(
+                                    parent_task_message=streaming_context.task_message,
+                                    content=text_content,
+                                ),
+                            )
+
+                    elif item.type == "tool_call_item":
+                        tool_call_map[item.raw_item.call_id] = item.raw_item
+
+                        tool_request_content = ToolRequestContent(
+                            author="agent",
+                            tool_call_id=item.raw_item.call_id,
+                            name=item.raw_item.name,
+                            arguments=json.loads(item.raw_item.arguments),
+                        )
+
+                        # Create tool request using streaming context
+                        async with (
+                            self.streaming_service.streaming_task_message_context(
+                                task_id=task_id,
+                                initial_content=tool_request_content,
+                            ) as streaming_context
+                        ):
+                            await streaming_context.stream_update(
+                                update=StreamTaskMessageFull(
+                                    parent_task_message=streaming_context.task_message,
+                                    content=tool_request_content,
+                                ),
+                            )
+
+                    elif item.type == "tool_call_output_item":
+                        # raw_item is a plain dict on this branch (indexed by
+                        # key, not attribute).
+                        tool_output_item = item.raw_item
+
+                        # NOTE(review): assumes the matching tool_call_item was
+                        # seen earlier in new_items; a missing call_id would
+                        # raise KeyError here.
+                        tool_response_content = ToolResponseContent(
+                            author="agent",
+                            tool_call_id=tool_output_item["call_id"],
+                            name=tool_call_map[tool_output_item["call_id"]].name,
+                            content=tool_output_item["output"],
+                        )
+                        # Create tool response using streaming context
+                        async with (
+                            self.streaming_service.streaming_task_message_context(
+                                task_id=task_id,
+                                initial_content=tool_response_content,
+                            ) as streaming_context
+                        ):
+                            await streaming_context.stream_update(
+                                update=StreamTaskMessageFull(
+                                    parent_task_message=streaming_context.task_message,
+                                    content=tool_response_content,
+                                ),
+                            )
+
+                # Convert to serializable result
+                return result
+
+    async def run_agent_streamed(
+        self,
+        input_list: list[dict[str, Any]],
+        mcp_server_params: list[StdioServerParameters],
+        agent_name: str,
+        agent_instructions: str,
+        trace_id: str | None = None,
+        parent_span_id: str | None = None,
+        handoff_description: str | None = None,
+        handoffs: list[BaseModel] | None = None,
+        model: str | None = None,
+        model_settings: BaseModel | None = None,
+        tools: list[BaseModel] | None = None,
+        output_type: type[Any] | None = None,
+        tool_use_behavior: (
+            Literal["run_llm_again", "stop_on_first_tool"]
+            | StopAtTools
+            | ToolsToFinalOutputFunction
+        ) = "run_llm_again",
+        mcp_timeout_seconds: int | None = None,
+    ) -> RunResultStreaming:
+        """
+        Run an agent with streaming enabled but no TaskMessage creation.
+
+        Args:
+            input_list: List of input data for the agent.
+            mcp_server_params: MCP server parameters for the agent.
+            agent_name: The name of the agent to run.
+            agent_instructions: Instructions for the agent.
+            trace_id: Optional trace ID for tracing.
+            parent_span_id: Optional parent span ID for tracing.
+            handoff_description: Optional description of the handoff.
+            handoffs: Optional list of handoffs.
+            model: Optional model to use.
+            model_settings: Optional model settings.
+            tools: Optional list of tools.
+            output_type: Optional output type.
+            tool_use_behavior: Optional tool use behavior.
+            mcp_timeout_seconds: Optional param to set the timeout threshold for the MCP servers. Defaults to 5 seconds.
+
+        Returns:
+            RunResultStreaming: The result of the agent run with streaming.
+        """
+        # NOTE(review): assumes self.tracer is not None despite the Optional
+        # constructor parameter.
+        trace = self.tracer.trace(trace_id)
+        redacted_params = redact_mcp_server_params(mcp_server_params)
+
+        async with trace.span(
+            parent_id=parent_span_id,
+            name="run_agent_streamed",
+            input={
+                "input_list": input_list,
+                "mcp_server_params": redacted_params,
+                "agent_name": agent_name,
+                "agent_instructions": agent_instructions,
+                "handoff_description": handoff_description,
+                "handoffs": handoffs,
+                "model": model,
+                "model_settings": model_settings,
+                "tools": tools,
+                "output_type": output_type,
+                "tool_use_behavior": tool_use_behavior,
+            },
+        ) as span:
+            heartbeat_if_in_workflow("run agent streamed")
+
+            async with mcp_server_context(
+                mcp_server_params, mcp_timeout_seconds
+            ) as servers:
+                tools = [tool.to_oai_function_tool() for tool in tools] if tools else []
+                handoffs = (
+                    [Agent(**handoff.model_dump()) for handoff in handoffs]
+                    if handoffs
+                    else []
+                )
+                agent_kwargs = {
+                    "name": agent_name,
+                    "instructions": agent_instructions,
+                    "mcp_servers": servers,
+                    "handoff_description": handoff_description,
+                    "handoffs": handoffs,
+                    "model": model,
+                    "tools": tools,
+                    "output_type": output_type,
+                    "tool_use_behavior": tool_use_behavior,
+                }
+                if model_settings is not None:
+                    agent_kwargs["model_settings"] = (
+                        model_settings.to_oai_model_settings()
+                    )
+
+                agent = Agent(**agent_kwargs)
+
+                # Run with streaming (but no TaskMessage creation)
+                result = Runner.run_streamed(starting_agent=agent, input=input_list)
+
+                # NOTE(review): run_streamed returns before the stream is
+                # consumed, so result.new_items / result.final_output are read
+                # here before the run has finished — the span output is likely
+                # empty/incomplete. Also, the MCP servers are closed when this
+                # method returns, while the caller may still be consuming
+                # result.stream_events(). Confirm intended behavior.
+                if span:
+                    span.output = {
+                        "new_items": [
+                            item.raw_item.model_dump()
+                            if isinstance(item.raw_item, BaseModel)
+                            else item.raw_item
+                            for item in result.new_items
+                        ],
+                        "final_output": result.final_output,
+                    }
+
+                return result
+
async def run_agent_streamed_auto_send(
    self,
    task_id: str,
    input_list: list[dict[str, Any]],
    mcp_server_params: list[StdioServerParameters],
    agent_name: str,
    agent_instructions: str,
    trace_id: str | None = None,
    parent_span_id: str | None = None,
    handoff_description: str | None = None,
    handoffs: list[BaseModel] | None = None,
    model: str | None = None,
    model_settings: BaseModel | None = None,
    tools: list[BaseModel] | None = None,
    output_type: type[Any] | None = None,
    tool_use_behavior: (
        Literal["run_llm_again", "stop_on_first_tool"]
        | StopAtTools
        | ToolsToFinalOutputFunction
    ) = "run_llm_again",
    mcp_timeout_seconds: int | None = None,
) -> RunResultStreaming:
    """
    Run an agent with streaming enabled and automatic TaskMessage creation.

    Tool calls/results are persisted immediately as complete messages; text
    deltas are streamed through per-item streaming contexts that are closed
    when the item (or the whole response) completes.

    Args:
        task_id: The ID of the task to run the agent for.
        input_list: List of input data for the agent.
        mcp_server_params: MCP server parameters for the agent.
        agent_name: The name of the agent to run.
        agent_instructions: Instructions for the agent.
        trace_id: Optional trace ID for tracing.
        parent_span_id: Optional parent span ID for tracing.
        handoff_description: Optional description of the handoff.
        handoffs: Optional list of handoffs.
        model: Optional model to use.
        model_settings: Optional model settings.
        tools: Optional list of tools.
        output_type: Optional output type.
        tool_use_behavior: Optional tool use behavior.
        mcp_timeout_seconds: Optional param to set the timeout threshold for the MCP servers. Defaults to 5 seconds.

    Returns:
        RunResultStreaming: The result of the agent run with streaming.

    Raises:
        ValueError: If the streaming service or Agentex client is unavailable.
    """
    if self.streaming_service is None:
        raise ValueError("StreamingService must be available for auto_send methods")
    if self.agentex_client is None:
        raise ValueError("Agentex client must be provided for auto_send methods")

    # call_id -> tool call, so tool outputs can be matched back to their name.
    tool_call_map: dict[str, ResponseFunctionToolCall] = {}

    trace = self.tracer.trace(trace_id)
    redacted_params = redact_mcp_server_params(mcp_server_params)

    async with trace.span(
        parent_id=parent_span_id,
        name="run_agent_streamed_auto_send",
        input={
            "task_id": task_id,
            "input_list": input_list,
            "mcp_server_params": redacted_params,
            "agent_name": agent_name,
            "agent_instructions": agent_instructions,
            "handoff_description": handoff_description,
            "handoffs": handoffs,
            "model": model,
            "model_settings": model_settings,
            "tools": tools,
            "output_type": output_type,
            "tool_use_behavior": tool_use_behavior,
        },
    ) as span:
        heartbeat_if_in_workflow("run agent streamed auto send")

        async with mcp_server_context(
            mcp_server_params, mcp_timeout_seconds
        ) as servers:
            tools = [tool.to_oai_function_tool() for tool in tools] if tools else []
            handoffs = (
                [Agent(**handoff.model_dump()) for handoff in handoffs]
                if handoffs
                else []
            )
            agent_kwargs = {
                "name": agent_name,
                "instructions": agent_instructions,
                "mcp_servers": servers,
                "handoff_description": handoff_description,
                "handoffs": handoffs,
                "model": model,
                "tools": tools,
                "output_type": output_type,
                "tool_use_behavior": tool_use_behavior,
            }
            if model_settings is not None:
                agent_kwargs["model_settings"] = (
                    model_settings.to_oai_model_settings()
                )

            agent = Agent(**agent_kwargs)

            # Run with streaming
            result = Runner.run_streamed(starting_agent=agent, input=input_list)

            item_id_to_streaming_context: dict[
                str, StreamingTaskMessageContext
            ] = {}
            unclosed_item_ids: set[str] = set()

            async def _close_all_open_contexts() -> None:
                # Pop each id before closing so we never mutate the set
                # while iterating it (iterating and calling .remove() on
                # the same set raises RuntimeError at runtime).
                while unclosed_item_ids:
                    item_id = unclosed_item_ids.pop()
                    await item_id_to_streaming_context[item_id].close()

            try:
                # Process streaming events with TaskMessage creation
                async for event in result.stream_events():
                    heartbeat_if_in_workflow(
                        "processing stream event with auto send"
                    )

                    if event.type == "run_item_stream_event":
                        if event.item.type == "tool_call_item":
                            tool_call_item = event.item.raw_item
                            tool_call_map[tool_call_item.call_id] = tool_call_item

                            tool_request_content = ToolRequestContent(
                                author="agent",
                                tool_call_id=tool_call_item.call_id,
                                name=tool_call_item.name,
                                arguments=json.loads(tool_call_item.arguments),
                            )

                            # Create tool request using streaming context (immediate completion)
                            async with self.streaming_service.streaming_task_message_context(
                                task_id=task_id,
                                initial_content=tool_request_content,
                            ) as streaming_context:
                                # The message has already been persisted, but we still
                                # need to send an update so subscribers see the full content.
                                await streaming_context.stream_update(
                                    update=StreamTaskMessageFull(
                                        parent_task_message=streaming_context.task_message,
                                        content=tool_request_content,
                                    ),
                                )

                        elif event.item.type == "tool_call_output_item":
                            tool_output_item = event.item.raw_item

                            tool_response_content = ToolResponseContent(
                                author="agent",
                                tool_call_id=tool_output_item["call_id"],
                                name=tool_call_map[
                                    tool_output_item["call_id"]
                                ].name,
                                content=tool_output_item["output"],
                            )

                            # Create tool response using streaming context (immediate completion)
                            async with self.streaming_service.streaming_task_message_context(
                                task_id=task_id,
                                initial_content=tool_response_content,
                            ) as streaming_context:
                                # The message has already been persisted, but we still
                                # need to send an update so subscribers see the full content.
                                await streaming_context.stream_update(
                                    update=StreamTaskMessageFull(
                                        parent_task_message=streaming_context.task_message,
                                        content=tool_response_content,
                                    ),
                                )

                    elif event.type == "raw_response_event":
                        if isinstance(event.data, ResponseTextDeltaEvent):
                            # Handle text delta
                            item_id = event.data.item_id

                            # Check if we already have a streaming context for this item
                            if item_id not in item_id_to_streaming_context:
                                # Create and open a new streaming context for this item
                                streaming_context = self.streaming_service.streaming_task_message_context(
                                    task_id=task_id,
                                    initial_content=TextContent(
                                        author="agent",
                                        content="",
                                    ),
                                )
                                item_id_to_streaming_context[
                                    item_id
                                ] = await streaming_context.open()
                                unclosed_item_ids.add(item_id)
                            else:
                                streaming_context = item_id_to_streaming_context[
                                    item_id
                                ]

                            # Stream the delta through the streaming service
                            await streaming_context.stream_update(
                                update=StreamTaskMessageDelta(
                                    parent_task_message=streaming_context.task_message,
                                    delta=TextDelta(text_delta=event.data.delta),
                                ),
                            )

                        elif isinstance(event.data, ResponseOutputItemDoneEvent):
                            # Handle item completion
                            item_id = event.data.item.id

                            # Finish the streaming context (sends DONE event and updates message)
                            if item_id in item_id_to_streaming_context:
                                await item_id_to_streaming_context[item_id].close()
                                # discard (not remove): the id may already have
                                # been closed by a ResponseCompletedEvent.
                                unclosed_item_ids.discard(item_id)

                        elif isinstance(event.data, ResponseCompletedEvent):
                            # All items complete; finish all remaining streaming contexts
                            await _close_all_open_contexts()

            finally:
                # Cleanup: ensure all streaming contexts are properly finished
                # even if the stream raised part-way through.
                await _close_all_open_contexts()

            if span:
                span.output = {
                    "new_items": [
                        item.raw_item.model_dump()
                        if isinstance(item.raw_item, BaseModel)
                        else item.raw_item
                        for item in result.new_items
                    ],
                    "final_output": result.final_output,
                }

            return result
diff --git a/src/agentex/lib/core/services/adk/providers/sgp.py b/src/agentex/lib/core/services/adk/providers/sgp.py
new file mode 100644
index 000000000..4fbbd9f0c
--- /dev/null
+++ b/src/agentex/lib/core/services/adk/providers/sgp.py
@@ -0,0 +1,99 @@
+import base64
+import os
+import tempfile
+
+from scale_gp import SGPClient
+
+from agentex.lib.core.tracing.tracer import AsyncTracer
+from agentex.lib.types.files import FileContentResponse
+from agentex.lib.utils.logging import make_logger
+from agentex.lib.utils.temporal import heartbeat_if_in_workflow
+
+logger = make_logger(__name__)
+
+
class SGPService:
    """Wrapper around the SGP client for file operations, with tracing and
    Temporal heartbeats."""

    # File-extension -> MIME type mapping used when constructing LLM input.
    # Unknown or missing extensions fall back to "application/pdf" (matches
    # the service's historical default).
    _EXTENSION_MIME_TYPES = {
        ".pdf": "application/pdf",
        ".doc": "application/msword",
        ".docx": "application/msword",
        ".txt": "text/plain",
        ".text": "text/plain",
        ".png": "image/png",
        ".jpg": "image/jpeg",
        ".jpeg": "image/jpeg",
    }

    def __init__(self, sgp_client: SGPClient, tracer: AsyncTracer):
        self.sgp_client = sgp_client
        self.tracer = tracer

    async def download_file_content(
        self,
        file_id: str,
        filename: str,
        trace_id: str | None = None,
        parent_span_id: str | None = None,
    ) -> FileContentResponse:
        """
        Download file content from SGP.

        Args:
            file_id: The ID of the file to download.
            filename: The filename of the file to download (used only to
                derive the MIME type from its extension).
            trace_id: The trace ID for tracing.
            parent_span_id: The parent span ID for tracing.

        Returns:
            FileContentResponse with mime_type and base64_content for constructing LLM input.
        """
        trace = self.tracer.trace(trace_id)
        async with trace.span(
            parent_id=parent_span_id,
            name="download_file_content",
            input={"file_id": file_id, "filename": filename},
        ) as span:
            logger.info(f"Downloading file content for file_id: {file_id}")
            heartbeat_if_in_workflow("downloading file content")

            # Get the SGP response
            response = self.sgp_client.beta.files.content(file_id)
            heartbeat_if_in_workflow("file content downloaded")

            # Determine mime type based on file extension
            file_extension = os.path.splitext(filename)[1].lower()
            mime_type = self._EXTENSION_MIME_TYPES.get(
                file_extension, "application/pdf"
            )

            # Spool the download through a named temporary file so both the
            # write_to_file and raw-bytes response styles are handled uniformly.
            with tempfile.NamedTemporaryFile(suffix=file_extension) as temp_file:
                heartbeat_if_in_workflow(f"saving to temp file: {temp_file.name}")

                # Use write_to_file method if available
                if hasattr(response, "write_to_file"):
                    response.write_to_file(temp_file.name)
                else:
                    # Fallback to direct writing
                    content_bytes = response.read()
                    temp_file.write(content_bytes)
                    temp_file.flush()

                # Seek to beginning of file for reading
                temp_file.seek(0)

                # Read the file back in binary mode
                data = temp_file.read()

                # Encode to base64
                base64_content = base64.b64encode(data).decode("utf-8")

                result = FileContentResponse(
                    mime_type=mime_type, base64_content=base64_content
                )

                # Record metadata for tracing. Guard against a disabled tracer:
                # sibling services check `if span:` before assigning output, and
                # span may be None here too.
                if span:
                    span.output = {
                        "file_id": file_id,
                        "mime_type": result.mime_type,
                        "content_size": len(result.base64_content),
                    }
                return result
diff --git a/src/agentex/lib/core/services/adk/state.py b/src/agentex/lib/core/services/adk/state.py
new file mode 100644
index 000000000..e27ee41fb
--- /dev/null
+++ b/src/agentex/lib/core/services/adk/state.py
@@ -0,0 +1,120 @@
+from typing import Any, Dict
+
+from agentex import AsyncAgentex
+from agentex.lib.core.tracing.tracer import AsyncTracer
+from agentex.types.state import State
+from agentex.lib.utils.logging import make_logger
+
+logger = make_logger(__name__)
+
+
class StateService:
    """CRUD facade over the Agentex states API, with each call recorded as a
    tracing span."""

    def __init__(
        self, agentex_client: AsyncAgentex, tracer: AsyncTracer
    ):
        self._agentex_client = agentex_client
        self._tracer = tracer

    async def create_state(
        self,
        task_id: str,
        agent_id: str,
        state: dict[str, Any],
        trace_id: str | None = None,
        parent_span_id: str | None = None,
    ) -> State:
        """Create a new state record for the given task/agent pair."""
        trace = self._tracer.trace(trace_id)
        async with trace.span(
            parent_id=parent_span_id,
            name="create_state",
            input={"task_id": task_id, "agent_id": agent_id, "state": state},
        ) as span:
            state_model = await self._agentex_client.states.create(
                task_id=task_id,
                agent_id=agent_id,
                state=state,
            )
            if span:
                span.output = state_model.model_dump()
            return state_model

    async def get_state(
        self,
        state_id: str | None = None,
        task_id: str | None = None,
        agent_id: str | None = None,
        trace_id: str | None = None,
        parent_span_id: str | None = None,
    ) -> State | None:
        """Fetch a state by id, or the first state matching task_id+agent_id.

        Raises:
            ValueError: If neither state_id nor the task_id/agent_id pair
                is provided.
        """
        # Fix: previously `trace` was None when the tracer was falsy, and the
        # unconditional `trace.span(...)` below would raise AttributeError.
        # `self._tracer` is a required constructor argument (matches siblings).
        trace = self._tracer.trace(trace_id)
        async with trace.span(
            parent_id=parent_span_id,
            name="get_state",
            input={
                "state_id": state_id,
                "task_id": task_id,
                "agent_id": agent_id,
            },
        ) as span:
            if state_id:
                state = await self._agentex_client.states.retrieve(state_id=state_id)
            elif task_id and agent_id:
                states = await self._agentex_client.states.list(
                    task_id=task_id,
                    agent_id=agent_id,
                )
                # NOTE(review): assumes states.list returns an indexable,
                # truthy-when-nonempty sequence — confirm against the SDK's
                # pagination behavior.
                state = states[0] if states else None
            else:
                raise ValueError(
                    "Must provide either state_id or both task_id and agent_id"
                )
            if span:
                span.output = state.model_dump() if state else None
            return state

    async def update_state(
        self,
        state_id: str,
        task_id: str,
        agent_id: str,
        state: Dict[str, object],
        trace_id: str | None = None,
        parent_span_id: str | None = None,
    ) -> State:
        """Replace the stored state for an existing state record."""
        trace = self._tracer.trace(trace_id)
        async with trace.span(
            parent_id=parent_span_id,
            name="update_state",
            input={
                "state_id": state_id,
                "task_id": task_id,
                "agent_id": agent_id,
                "state": state,
            },
        ) as span:
            state_model = await self._agentex_client.states.update(
                state_id=state_id,
                task_id=task_id,
                agent_id=agent_id,
                state=state,
            )
            if span:
                span.output = state_model.model_dump()
            return state_model

    async def delete_state(
        self,
        state_id: str,
        trace_id: str | None = None,
        parent_span_id: str | None = None,
    ) -> State:
        """Delete a state record and return the deleted representation."""
        trace = self._tracer.trace(trace_id)
        async with trace.span(
            parent_id=parent_span_id,
            name="delete_state",
            input={"state_id": state_id},
        ) as span:
            state = await self._agentex_client.states.delete(state_id)
            if span:
                span.output = state.model_dump()
            return state
diff --git a/src/agentex/lib/core/services/adk/streaming.py b/src/agentex/lib/core/services/adk/streaming.py
new file mode 100644
index 000000000..c89fa74da
--- /dev/null
+++ b/src/agentex/lib/core/services/adk/streaming.py
@@ -0,0 +1,262 @@
+import json
+from typing import Literal, cast
+
+from agentex import AsyncAgentex
+from agentex.lib.core.adapters.streams.port import EventStreamRepository
+from agentex.lib.types.task_message_updates import (
+ TaskMessageDelta,
+ TaskMessageUpdate,
+ TextDelta,
+ DataDelta,
+ ToolRequestDelta,
+ ToolResponseDelta,
+ StreamTaskMessage,
+ StreamTaskMessageStart,
+ StreamTaskMessageDelta,
+ StreamTaskMessageFull,
+ StreamTaskMessageDone,
+)
+from agentex.lib.utils.logging import make_logger
+from agentex.types.data_content import DataContent
+from agentex.types.task_message import (
+ TaskMessage,
+ TaskMessageContent,
+)
+from agentex.types.task_message_content_param import TaskMessageContentParam
+from agentex.types.text_content import TextContent
+from agentex.types.tool_request_content import ToolRequestContent
+from agentex.types.tool_response_content import ToolResponseContent
+
+logger = make_logger(__name__)
+
+
+def _get_stream_topic(task_id: str) -> str:
+ return f"task:{task_id}"
+
+
class DeltaAccumulator:
    """Collects a homogeneous stream of TaskMessageDelta objects and folds
    them into one TaskMessageContent when streaming completes.

    The first delta fixes the accumulator's type; later deltas of a
    different type are rejected.
    """

    def __init__(self):
        self._accumulated_deltas: list[TaskMessageDelta] = []
        self._delta_type: Literal["text", "data", "tool_request", "tool_response"] | None = None

    def add_delta(self, delta: TaskMessageDelta):
        """Append *delta*, locking the accumulator to the first delta's type.

        Raises:
            ValueError: For an unrecognized delta type, or when the delta's
                type differs from the type already locked in.
        """
        if self._delta_type is None:
            if delta.type not in ("text", "data", "tool_request", "tool_response"):
                raise ValueError(f"Unknown delta type: {delta.type}")
            self._delta_type = cast(
                Literal["text", "data", "tool_request", "tool_response"], delta.type
            )
        elif self._delta_type != delta.type:
            raise ValueError(
                f"Delta type mismatch: {self._delta_type} != {delta.type}"
            )
        self._accumulated_deltas.append(delta)

    def convert_to_content(self) -> TaskMessageContent:
        """Fold every accumulated delta into a single content object.

        Raises:
            ValueError: If accumulated JSON payloads do not parse, or the
                accumulator holds no recognized delta type.
        """
        deltas = self._accumulated_deltas

        if self._delta_type == "text":
            joined_text = "".join(
                d.text_delta or "" for d in deltas if isinstance(d, TextDelta)
            )
            return TextContent(author="agent", content=joined_text)

        if self._delta_type == "data":
            data_content_str = "".join(
                d.data_delta or "" for d in deltas if isinstance(d, DataDelta)
            )
            try:
                parsed = json.loads(data_content_str)
            except json.JSONDecodeError as e:
                raise ValueError(
                    f"Accumulated data content is not valid JSON: {data_content_str}"
                ) from e
            return DataContent(author="agent", data=parsed)

        if self._delta_type == "tool_request":
            request_deltas = [d for d in deltas if isinstance(d, ToolRequestDelta)]
            arguments_content_str = "".join(
                d.arguments_delta or "" for d in request_deltas
            )
            try:
                parsed_args = json.loads(arguments_content_str)
            except json.JSONDecodeError as e:
                raise ValueError(
                    f"Accumulated tool request arguments is not valid JSON: {arguments_content_str}"
                ) from e
            first = request_deltas[0]
            return ToolRequestContent(
                author="agent",
                tool_call_id=first.tool_call_id,
                name=first.name,
                arguments=parsed_args,
            )

        if self._delta_type == "tool_response":
            response_deltas = [d for d in deltas if isinstance(d, ToolResponseDelta)]
            joined_response = "".join(
                d.tool_response_delta or "" for d in response_deltas
            )
            first = response_deltas[0]
            return ToolResponseContent(
                author="agent",
                tool_call_id=first.tool_call_id,
                name=first.name,
                content=joined_response,
            )

        raise ValueError(f"Unknown delta type: {self._delta_type}")
+
+
class StreamingTaskMessageContext:
    """Async context manager owning the lifecycle of one streamed TaskMessage.

    open() persists an IN_PROGRESS message and emits a START event; deltas
    routed through stream_update() are forwarded to subscribers and
    accumulated; close() emits DONE and persists the final content.
    """

    def __init__(
        self,
        task_id: str,
        initial_content: TaskMessageContent,
        agentex_client: AsyncAgentex,
        streaming_service: "StreamingService",
    ):
        self.task_id = task_id
        self.initial_content = initial_content
        # Set by open(); None until the context is opened.
        self.task_message: TaskMessage | None = None
        self._agentex_client = agentex_client
        self._streaming_service = streaming_service
        self._is_closed = False
        self._delta_accumulator = DeltaAccumulator()

    async def __aenter__(self) -> "StreamingTaskMessageContext":
        return await self.open()

    async def __aexit__(self, exc_type, exc_val, exc_tb) -> None:
        # Fix: this previously did `return await self.close()`, returning the
        # (truthy) TaskMessage. A truthy return from __aexit__ tells Python to
        # SUPPRESS any exception raised inside the `async with` block, silently
        # swallowing errors. Close, then return None so exceptions propagate.
        await self.close()

    async def open(self) -> "StreamingTaskMessageContext":
        """Persist the initial message (IN_PROGRESS) and emit the START event."""
        self._is_closed = False

        self.task_message = await self._agentex_client.messages.create(
            task_id=self.task_id,
            content=self.initial_content.model_dump(),
            streaming_status="IN_PROGRESS",
        )

        # Send the START event
        start_event = StreamTaskMessageStart(
            parent_task_message=self.task_message,
            content=self.initial_content,
        )
        await self._streaming_service.stream_update(start_event)

        return self

    async def close(self) -> TaskMessage:
        """Close the streaming context: emit DONE and persist final content.

        Idempotent — a second call returns the already-finished message.

        Raises:
            ValueError: If the context was never opened.
        """
        if not self.task_message:
            raise ValueError("Context not properly initialized - no task message")

        if self._is_closed:
            return self.task_message  # Already done

        # Send the DONE event
        done_event = StreamTaskMessageDone(parent_task_message=self.task_message)
        await self._streaming_service.stream_update(done_event)

        # Replace the placeholder content with the accumulated deltas, if any.
        if self._delta_accumulator._accumulated_deltas:
            self.task_message.content = self._delta_accumulator.convert_to_content()

        await self._agentex_client.messages.update(
            task_id=self.task_id,
            message_id=self.task_message.id,
            content=self.task_message.content.model_dump(),
            streaming_status="DONE",
        )

        # Mark the context as done
        self._is_closed = True
        return self.task_message

    async def stream_update(
        self, update: StreamTaskMessage
    ) -> StreamTaskMessage | None:
        """Forward *update* to subscribers, accumulating deltas along the way.

        A DONE update closes the context; a FULL update persists the complete
        content immediately and marks the context closed.

        Raises:
            ValueError: If the context is already closed or was never opened.
        """
        if self._is_closed:
            raise ValueError("Context is already done")

        if not self.task_message:
            raise ValueError("Context not properly initialized - no task message")

        if isinstance(update, StreamTaskMessageDelta):
            if update.delta is not None:
                self._delta_accumulator.add_delta(update.delta)

        result = await self._streaming_service.stream_update(update)

        if isinstance(update, StreamTaskMessageDone):
            await self.close()
            return update
        elif isinstance(update, StreamTaskMessageFull):
            await self._agentex_client.messages.update(
                task_id=self.task_id,
                message_id=update.parent_task_message.id,
                content=update.content.model_dump(),
                streaming_status="DONE",
            )
            self._is_closed = True
        return result
+
+
class StreamingService:
    """Publishes TaskMessageUpdate events to the per-task event stream and
    creates StreamingTaskMessageContext instances for streamed messages."""

    def __init__(
        self,
        agentex_client: AsyncAgentex,
        stream_repository: EventStreamRepository,
    ):
        self._agentex_client = agentex_client
        self._stream_repository = stream_repository

    def streaming_task_message_context(
        self,
        task_id: str,
        initial_content: TaskMessageContent,
    ) -> StreamingTaskMessageContext:
        """Create an (unopened) streaming context for a new message on *task_id*.

        The caller opens it via `async with` or an explicit `await .open()`.
        """
        return StreamingTaskMessageContext(
            task_id=task_id,
            initial_content=initial_content,
            agentex_client=self._agentex_client,
            streaming_service=self,
        )

    async def stream_update(
        self, update: TaskMessageUpdate
    ) -> TaskMessageUpdate | None:
        """
        Stream an update to the repository.

        Args:
            update: The update to stream

        Returns:
            The update if it was streamed successfully, None otherwise.
            (Send failures are logged and swallowed deliberately — streaming
            is best-effort and must not crash the caller.)
        """
        # Updates are published on the topic derived from the parent message's task.
        stream_topic = _get_stream_topic(update.parent_task_message.task_id)

        try:
            await self._stream_repository.send_event(
                topic=stream_topic, event=update.model_dump(mode="json")  # type: ignore
            )
            return update
        except Exception as e:
            logger.exception(f"Failed to stream event: {e}")
            return None
diff --git a/src/agentex/lib/core/services/adk/tasks.py b/src/agentex/lib/core/services/adk/tasks.py
new file mode 100644
index 000000000..af4b8fbfd
--- /dev/null
+++ b/src/agentex/lib/core/services/adk/tasks.py
@@ -0,0 +1,69 @@
+from agentex import AsyncAgentex
+from agentex.lib.core.tracing.tracer import AsyncTracer
+from agentex.types.task import Task
+from agentex.lib.utils.logging import make_logger
+from agentex.lib.utils.temporal import heartbeat_if_in_workflow
+
+logger = make_logger(__name__)
+
+
class TasksService:
    """Facade over the Agentex tasks API with tracing spans and Temporal
    heartbeats, addressing tasks by id or by name."""

    def __init__(
        self,
        agentex_client: AsyncAgentex,
        tracer: AsyncTracer,
    ):
        self._agentex_client = agentex_client
        self._tracer = tracer

    async def get_task(
        self,
        task_id: str | None = None,
        task_name: str | None = None,
        trace_id: str | None = None,
        parent_span_id: str | None = None,
    ) -> Task:
        """Retrieve a task by id (preferred) or by name.

        Raises:
            ValueError: If neither task_id nor task_name is provided.
        """
        trace = self._tracer.trace(trace_id)
        async with trace.span(
            parent_id=parent_span_id,
            name="get_task",
            input={"task_id": task_id, "task_name": task_name},
        ) as span:
            heartbeat_if_in_workflow("get task")
            if task_id:
                task_model = await self._agentex_client.tasks.retrieve(task_id=task_id)
            elif task_name:
                task_model = await self._agentex_client.tasks.retrieve_by_name(task_name=task_name)
            else:
                raise ValueError("Either task_id or task_name must be provided.")
            if span:
                span.output = task_model.model_dump()
            return task_model

    async def delete_task(
        self,
        task_id: str | None = None,
        task_name: str | None = None,
        trace_id: str | None = None,
        parent_span_id: str | None = None,
    ) -> Task:
        """Delete a task by id (preferred) or by name.

        Raises:
            ValueError: If neither task_id nor task_name is provided.
        """
        # Fix: previously `trace` was `None` when the tracer was falsy, and
        # the unconditional `trace.span(...)` below would raise
        # AttributeError. The tracer is a required constructor argument, so
        # use it directly (consistent with get_task).
        trace = self._tracer.trace(trace_id)
        async with trace.span(
            parent_id=parent_span_id,
            name="delete_task",
            input={"task_id": task_id, "task_name": task_name},
        ) as span:
            heartbeat_if_in_workflow("delete task")
            if task_id:
                task_model = await self._agentex_client.tasks.delete(task_id=task_id)
            elif task_name:
                task_model = await self._agentex_client.tasks.delete_by_name(task_name=task_name)
            else:
                raise ValueError("Either task_id or task_name must be provided.")
            if span:
                span.output = task_model.model_dump()
            return task_model
diff --git a/src/agentex/lib/core/services/adk/tracing.py b/src/agentex/lib/core/services/adk/tracing.py
new file mode 100644
index 000000000..0de69d210
--- /dev/null
+++ b/src/agentex/lib/core/services/adk/tracing.py
@@ -0,0 +1,36 @@
+from typing import Any
+from agentex.lib.core.tracing.tracer import AsyncTracer
+from agentex.types.span import Span
+from agentex.lib.utils.logging import make_logger
+from agentex.lib.utils.model_utils import BaseModel
+from agentex.lib.utils.temporal import heartbeat_if_in_workflow
+
+logger = make_logger(__name__)
+
+
class TracingService:
    """Facade over AsyncTracer exposing explicit start/end span operations
    to activity code."""

    def __init__(self, tracer: AsyncTracer):
        self._tracer = tracer

    async def start_span(
        self,
        trace_id: str,
        name: str,
        parent_id: str | None = None,
        input: list[Any] | dict[str, Any] | BaseModel | None = None,
        data: list[Any] | dict[str, Any] | BaseModel | None = None,
    ) -> Span | None:
        """Start a span on the given trace and return it (None when tracing
        is disabled).

        NOTE(review): the span is created with `async with trace.span(...)`,
        so the context manager exits before this method returns. If exiting
        the context ends the span, the returned span is already closed and
        the separate end_span() below is redundant — confirm against the
        AsyncTracer.span semantics.
        """
        trace = self._tracer.trace(trace_id)
        async with trace.span(
            parent_id=parent_id,
            name=name,
            input=input or {},
            data=data,
        ) as span:
            heartbeat_if_in_workflow("start span")
            # `span` may be None when tracing is disabled; note that
            # `span if span else None` is equivalent to returning `span`.
            return span if span else None

    async def end_span(self, trace_id: str, span: Span) -> Span:
        """End *span* on the trace identified by *trace_id* and return it."""
        trace = self._tracer.trace(trace_id)
        await trace.end_span(span)
        return span
diff --git a/src/agentex/lib/core/services/adk/utils/__init__.py b/src/agentex/lib/core/services/adk/utils/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/agentex/lib/core/services/adk/utils/templating.py b/src/agentex/lib/core/services/adk/utils/templating.py
new file mode 100644
index 000000000..b9bf01b66
--- /dev/null
+++ b/src/agentex/lib/core/services/adk/utils/templating.py
@@ -0,0 +1,58 @@
+from datetime import datetime
+from typing import Any
+
+from jinja2 import BaseLoader, Environment
+
+from agentex.lib.core.tracing.tracer import AsyncTracer
+from agentex.lib.utils.temporal import heartbeat_if_in_workflow
+
# Module-wide Jinja environment shared by all renders: trims the newline
# after block tags, strips leading whitespace before block tags, and enables
# the `do` expression-statement extension for templates.
JINJA_ENV = Environment(
    loader=BaseLoader(),
    trim_blocks=True,
    lstrip_blocks=True,
    extensions=["jinja2.ext.do"],
)
+
+
class TemplatingService:
    """Renders Jinja templates, optionally recording each render as a
    tracing span when a tracer is configured."""

    def __init__(self, tracer: AsyncTracer | None = None):
        self.tracer = tracer

    async def render_jinja(
        self,
        template: str,
        variables: dict[str, Any],
        trace_id: str | None = None,
        parent_span_id: str | None = None,
    ) -> str:
        """
        Activity that renders a Jinja template with the provided data.

        Args:
            template: The template string to render.
            variables: The variables to render the template with.
            trace_id: The trace ID for tracing.
            parent_span_id: The parent span ID for tracing.

        Returns:
            The rendered template as a string

        Raises:
            ValueError: If the template fails to render.
        """
        # Fix: `tracer` is declared optional (defaults to None) but was
        # dereferenced unconditionally, raising AttributeError for untraced
        # instances. Render without a span in that case.
        if self.tracer is None:
            return self._render(template, variables)

        trace = self.tracer.trace(trace_id)
        async with trace.span(
            parent_id=parent_span_id,
            name="render_jinja",
            input={"template": template, "variables": variables},
        ) as span:
            heartbeat_if_in_workflow("render jinja")
            rendered_template = self._render(template, variables)
            if span:
                span.output = {"jinja_output": rendered_template}
            return rendered_template

    @staticmethod
    def _render(template: str, variables: dict[str, Any]) -> str:
        """Render *template* with *variables*, exposing `datetime` as a
        template global; wraps any Jinja error in ValueError."""
        global_variables = {
            "datetime": datetime,
        }
        jinja_template = JINJA_ENV.from_string(template, globals=global_variables)
        try:
            return jinja_template.render(variables)
        except Exception as e:
            raise ValueError(f"Error rendering Jinja template: {str(e)}") from e
diff --git a/src/agentex/lib/core/temporal/__init__.py b/src/agentex/lib/core/temporal/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/agentex/lib/core/temporal/activities/__init__.py b/src/agentex/lib/core/temporal/activities/__init__.py
new file mode 100644
index 000000000..d8d76053d
--- /dev/null
+++ b/src/agentex/lib/core/temporal/activities/__init__.py
@@ -0,0 +1,207 @@
+from scale_gp import SGPClient, SGPClientError
+
+from agentex import AsyncAgentex
+from agentex.lib.core.adapters.llm.adapter_litellm import LiteLLMGateway
+from agentex.lib.core.adapters.streams.adapter_redis import RedisEventStreamRepository
+from agentex.lib.core.services.adk.acp.acp import ACPService
+from agentex.lib.core.services.adk.agent_task_tracker import AgentTaskTrackerService
+from agentex.lib.core.services.adk.events import EventsService
+from agentex.lib.core.services.adk.messages import MessagesService
+from agentex.lib.core.services.adk.providers.litellm import LiteLLMService
+from agentex.lib.core.services.adk.providers.openai import OpenAIService
+from agentex.lib.core.services.adk.providers.sgp import SGPService
+from agentex.lib.core.services.adk.state import StateService
+from agentex.lib.core.services.adk.streaming import StreamingService
+from agentex.lib.core.services.adk.tasks import TasksService
+from agentex.lib.core.services.adk.tracing import TracingService
+from agentex.lib.core.services.adk.utils.templating import TemplatingService
+from agentex.lib.core.temporal.activities.adk.acp.acp_activities import ACPActivities
+from agentex.lib.core.temporal.activities.adk.agent_task_tracker_activities import (
+ AgentTaskTrackerActivities,
+)
+from agentex.lib.core.temporal.activities.adk.events_activities import EventsActivities
+from agentex.lib.core.temporal.activities.adk.messages_activities import MessagesActivities
+from agentex.lib.core.temporal.activities.adk.providers.litellm_activities import (
+ LiteLLMActivities,
+)
+from agentex.lib.core.temporal.activities.adk.providers.openai_activities import (
+ OpenAIActivities,
+)
+from agentex.lib.core.temporal.activities.adk.providers.sgp_activities import SGPActivities
+from agentex.lib.core.temporal.activities.adk.state_activities import StateActivities
+from agentex.lib.core.temporal.activities.adk.streaming_activities import (
+ StreamingActivities,
+)
+from agentex.lib.core.temporal.activities.adk.tasks_activities import TasksActivities
+from agentex.lib.core.temporal.activities.adk.tracing_activities import TracingActivities
+from agentex.lib.core.temporal.activities.adk.utils.templating_activities import (
+ TemplatingActivities,
+)
+from agentex.lib.core.tracing import AsyncTracer
+
+
def get_all_activities(sgp_client=None):
    """
    Returns a list of all standard activity functions that can be directly passed to worker.run().

    Args:
        sgp_client: Optional SGP client instance. When omitted, a client is
            constructed from the environment; if construction fails, SGP
            activities are excluded from the returned list.

    Returns:
        list: A list of activity functions ready to be passed to worker.run()
    """
    # Initialize common dependencies.
    # Fix: the original unconditionally overwrote the caller-supplied
    # sgp_client with a freshly constructed one; only fall back to
    # constructing a client when none was given.
    if sgp_client is None:
        try:
            sgp_client = SGPClient()
        except SGPClientError:
            sgp_client = None

    llm_gateway = LiteLLMGateway()
    stream_repository = RedisEventStreamRepository()
    agentex_client = AsyncAgentex()
    tracer = AsyncTracer(agentex_client)

    # Services

    ## ADK
    streaming_service = StreamingService(
        agentex_client=agentex_client,
        stream_repository=stream_repository,
    )
    messages_service = MessagesService(
        agentex_client=agentex_client,
        streaming_service=streaming_service,
        tracer=tracer,
    )
    events_service = EventsService(
        agentex_client=agentex_client,
        tracer=tracer,
    )
    agent_task_tracker_service = AgentTaskTrackerService(
        agentex_client=agentex_client,
        tracer=tracer,
    )
    state_service = StateService(
        agentex_client=agentex_client,
        tracer=tracer,
    )
    tasks_service = TasksService(
        agentex_client=agentex_client,
        tracer=tracer,
    )
    tracing_service = TracingService(
        tracer=tracer,
    )

    ## ACP
    acp_service = ACPService(
        agentex_client=agentex_client,
        tracer=tracer,
    )

    ## Providers
    litellm_service = LiteLLMService(
        agentex_client=agentex_client,
        llm_gateway=llm_gateway,
        streaming_service=streaming_service,
        tracer=tracer,
    )
    openai_service = OpenAIService(
        agentex_client=agentex_client,
        streaming_service=streaming_service,
        tracer=tracer,
    )
    # SGP is optional: only build the service when a client is available.
    sgp_service = None
    if sgp_client is not None:
        sgp_service = SGPService(
            sgp_client=sgp_client,
            tracer=tracer,
        )

    ## Utils
    templating_service = TemplatingService(
        tracer=tracer,
    )

    # ADK

    ## Core activities
    messages_activities = MessagesActivities(messages_service=messages_service)
    events_activities = EventsActivities(events_service=events_service)
    agent_task_tracker_activities = AgentTaskTrackerActivities(
        agent_task_tracker_service=agent_task_tracker_service
    )
    state_activities = StateActivities(state_service=state_service)
    streaming_activities = StreamingActivities(streaming_service=streaming_service)
    tasks_activities = TasksActivities(tasks_service=tasks_service)
    tracing_activities = TracingActivities(tracing_service=tracing_service)

    ## ACP
    acp_activities = ACPActivities(acp_service=acp_service)

    ## Providers
    litellm_activities = LiteLLMActivities(litellm_service=litellm_service)
    openai_activities = OpenAIActivities(openai_service=openai_service)
    if sgp_client is not None:
        sgp_activities = SGPActivities(sgp_service=sgp_service)
    else:
        sgp_activities = None

    ## Utils
    templating_activities = TemplatingActivities(templating_service=templating_service)

    # Build list of standard activities
    activities = [
        # Core activities
        ## Messages activities
        messages_activities.create_message,
        messages_activities.update_message,
        messages_activities.create_messages_batch,
        messages_activities.update_messages_batch,
        messages_activities.list_messages,
        ## Events activities
        events_activities.get_event,
        events_activities.list_events,
        ## Agent Task Tracker activities
        agent_task_tracker_activities.get_agent_task_tracker,
        agent_task_tracker_activities.get_agent_task_tracker_by_task_and_agent,
        agent_task_tracker_activities.update_agent_task_tracker,
        ## State activities
        state_activities.create_state,
        state_activities.get_state,
        state_activities.update_state,
        state_activities.delete_state,
        ## Streaming activities
        streaming_activities.stream_update,
        ## Tasks activities
        tasks_activities.get_task,
        tasks_activities.delete_task,
        ## Tracing activities
        tracing_activities.start_span,
        tracing_activities.end_span,
        # ACP activities
        acp_activities.task_create,
        acp_activities.message_send,
        acp_activities.event_send,
        acp_activities.task_cancel,
        # Providers
        ## LiteLLM activities
        litellm_activities.chat_completion,
        litellm_activities.chat_completion_auto_send,
        litellm_activities.chat_completion_stream_auto_send,
        ## OpenAI activities
        openai_activities.run_agent,
        openai_activities.run_agent_auto_send,
        openai_activities.run_agent_streamed_auto_send,
        # Utils
        templating_activities.render_jinja,
    ]

    # SGP activities are appended only when an SGP client exists.
    if sgp_client is not None:
        sgp_all_activities = [
            sgp_activities.download_file_content,
        ]
        activities.extend(sgp_all_activities)

    return activities
diff --git a/src/agentex/lib/core/temporal/activities/activity_helpers.py b/src/agentex/lib/core/temporal/activities/activity_helpers.py
new file mode 100644
index 000000000..82e83c534
--- /dev/null
+++ b/src/agentex/lib/core/temporal/activities/activity_helpers.py
@@ -0,0 +1,37 @@
+from datetime import timedelta
+from typing import Any, TypeVar
+
+from pydantic import TypeAdapter
+from temporalio import workflow
+from temporalio.common import RetryPolicy
+
+from agentex.lib.utils.model_utils import BaseModel
+
+T = TypeVar("T", bound="BaseModel")
+
+
class ActivityHelpers:
    """Thin wrapper around temporalio's workflow.execute_activity that
    validates the raw activity payload into the requested response type.

    Must be called from inside a Temporal workflow context.
    """

    @staticmethod
    async def execute_activity(
        activity_name: str,
        request: BaseModel | str | int | float | bool | dict[str, Any] | list[Any],
        response_type: Any,
        start_to_close_timeout: timedelta | None = None,
        heartbeat_timeout: timedelta | None = None,
        retry_policy: RetryPolicy | None = None,
    ) -> Any:
        # Defaults: 10-second start-to-close timeout and unlimited retry
        # attempts (maximum_attempts=0 means "no limit" in Temporal).
        effective_timeout = (
            timedelta(seconds=10) if start_to_close_timeout is None else start_to_close_timeout
        )
        effective_policy = (
            RetryPolicy(maximum_attempts=0) if retry_policy is None else retry_policy
        )

        raw_response = await workflow.execute_activity(
            activity=activity_name,
            arg=request,
            start_to_close_timeout=effective_timeout,
            retry_policy=effective_policy,
            heartbeat_timeout=heartbeat_timeout,
        )

        # Coerce the serialized payload back into the declared response type.
        return TypeAdapter(response_type).validate_python(raw_response)
diff --git a/src/agentex/lib/core/temporal/activities/adk/__init__.py b/src/agentex/lib/core/temporal/activities/adk/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/agentex/lib/core/temporal/activities/adk/acp/__init__.py b/src/agentex/lib/core/temporal/activities/adk/acp/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/agentex/lib/core/temporal/activities/adk/acp/acp_activities.py b/src/agentex/lib/core/temporal/activities/adk/acp/acp_activities.py
new file mode 100644
index 000000000..ebd23cc22
--- /dev/null
+++ b/src/agentex/lib/core/temporal/activities/adk/acp/acp_activities.py
@@ -0,0 +1,86 @@
+from enum import Enum
+from typing import Any
+
+from temporalio import activity
+
+from agentex.lib.core.services.adk.acp.acp import ACPService
+from agentex.types.event import Event
+from agentex.types.task_message import TaskMessage
+from agentex.types.task_message_content import TaskMessageContent
+from agentex.types.task import Task
+from agentex.lib.types.tracing import BaseModelWithTraceParams
+from agentex.lib.utils.logging import make_logger
+
+logger = make_logger(__name__)
+
+
class ACPActivityName(str, Enum):
    """Temporal activity names for ACP operations.

    Workflows reference activities by these dash-separated string values.
    """

    TASK_CREATE = "task-create"
    MESSAGE_SEND = "message-send"
    EVENT_SEND = "event-send"
    TASK_CANCEL = "task-cancel"
+
+
class TaskCreateParams(BaseModelWithTraceParams):
    """Input for the task-create activity."""

    name: str | None = None
    # agent_id / agent_name: presumably either one identifies the target
    # agent — confirm resolution rules against ACPService.
    agent_id: str | None = None
    agent_name: str | None = None
    params: dict[str, Any] | None = None


class MessageSendParams(BaseModelWithTraceParams):
    """Input for the message-send activity."""

    agent_id: str | None = None
    agent_name: str | None = None
    task_id: str | None = None
    content: TaskMessageContent


class EventSendParams(BaseModelWithTraceParams):
    """Input for the event-send activity."""

    agent_id: str | None = None
    agent_name: str | None = None
    task_id: str | None = None
    content: TaskMessageContent


class TaskCancelParams(BaseModelWithTraceParams):
    """Input for the task-cancel activity.

    Presumably either task_id or task_name identifies the task — confirm
    against ACPService.task_cancel.
    """

    task_id: str | None = None
    task_name: str | None = None
+ task_name: str | None = None
+
+
class ACPActivities:
    """Temporal activity wrappers that delegate ACP operations to ACPService."""

    def __init__(self, acp_service: ACPService):
        self._acp_service = acp_service

    @activity.defn(name=ACPActivityName.TASK_CREATE)
    async def task_create(self, params: TaskCreateParams) -> Task:
        """Create a task through ACP."""
        request = {
            "name": params.name,
            "agent_id": params.agent_id,
            "agent_name": params.agent_name,
            "params": params.params,
        }
        return await self._acp_service.task_create(**request)

    @activity.defn(name=ACPActivityName.MESSAGE_SEND)
    async def message_send(self, params: MessageSendParams) -> TaskMessage:
        """Send a message to a task through ACP."""
        request = {
            "agent_id": params.agent_id,
            "agent_name": params.agent_name,
            "task_id": params.task_id,
            "content": params.content,
        }
        return await self._acp_service.message_send(**request)

    @activity.defn(name=ACPActivityName.EVENT_SEND)
    async def event_send(self, params: EventSendParams) -> Event:
        """Send an event to a task through ACP."""
        request = {
            "agent_id": params.agent_id,
            "agent_name": params.agent_name,
            "task_id": params.task_id,
            "content": params.content,
        }
        return await self._acp_service.event_send(**request)

    @activity.defn(name=ACPActivityName.TASK_CANCEL)
    async def task_cancel(self, params: TaskCancelParams) -> Task:
        """Cancel a task through ACP."""
        request = {
            "task_id": params.task_id,
            "task_name": params.task_name,
        }
        return await self._acp_service.task_cancel(**request)
diff --git a/src/agentex/lib/core/temporal/activities/adk/agent_task_tracker_activities.py b/src/agentex/lib/core/temporal/activities/adk/agent_task_tracker_activities.py
new file mode 100644
index 000000000..5bd974d58
--- /dev/null
+++ b/src/agentex/lib/core/temporal/activities/adk/agent_task_tracker_activities.py
@@ -0,0 +1,76 @@
+from enum import Enum
+
+from temporalio import activity
+
+from agentex.lib.core.services.adk.agent_task_tracker import AgentTaskTrackerService
+from agentex.lib.types.tracing import BaseModelWithTraceParams
+from agentex.lib.utils.logging import make_logger
+from agentex.types.agent_task_tracker import AgentTaskTracker
+
+logger = make_logger(__name__)
+
+
class AgentTaskTrackerActivityName(str, Enum):
    """Temporal activity names for agent-task-tracker operations."""

    GET_AGENT_TASK_TRACKER = "get-agent-task-tracker"
    GET_AGENT_TASK_TRACKER_BY_TASK_AND_AGENT = (
        "get-agent-task-tracker-by-task-and-agent"
    )
    UPDATE_AGENT_TASK_TRACKER = "update-agent-task-tracker"
+
+
class GetAgentTaskTrackerParams(BaseModelWithTraceParams):
    """Input for the get-agent-task-tracker activity."""

    tracker_id: str


class GetAgentTaskTrackerByTaskAndAgentParams(BaseModelWithTraceParams):
    """Input for the get-agent-task-tracker-by-task-and-agent activity."""

    task_id: str
    agent_id: str


class UpdateAgentTaskTrackerParams(BaseModelWithTraceParams):
    """Input for the update-agent-task-tracker activity.

    Fields left as None are presumably not modified by the service —
    confirm against AgentTaskTrackerService.update_agent_task_tracker.
    """

    tracker_id: str
    # Fix: these optional fields previously had no defaults, forcing every
    # caller to pass all of them explicitly — inconsistent with every other
    # params model in this package. Adding `= None` is backward compatible.
    last_processed_event_id: str | None = None
    status: str | None = None
    status_reason: str | None = None
+
+
class AgentTaskTrackerActivities:
    """Temporal activity wrappers around AgentTaskTrackerService."""

    def __init__(self, agent_task_tracker_service: AgentTaskTrackerService):
        self._agent_task_tracker_service = agent_task_tracker_service

    @activity.defn(name=AgentTaskTrackerActivityName.GET_AGENT_TASK_TRACKER)
    async def get_agent_task_tracker(
        self, params: GetAgentTaskTrackerParams
    ) -> AgentTaskTracker:
        """Fetch a tracker by its id."""
        request = {
            "tracker_id": params.tracker_id,
            "trace_id": params.trace_id,
            "parent_span_id": params.parent_span_id,
        }
        return await self._agent_task_tracker_service.get_agent_task_tracker(**request)

    @activity.defn(
        name=AgentTaskTrackerActivityName.GET_AGENT_TASK_TRACKER_BY_TASK_AND_AGENT
    )
    async def get_agent_task_tracker_by_task_and_agent(
        self,
        params: GetAgentTaskTrackerByTaskAndAgentParams,
    ) -> AgentTaskTracker | None:
        """Fetch the tracker for a (task_id, agent_id) pair, if one exists."""
        request = {
            "task_id": params.task_id,
            "agent_id": params.agent_id,
            "trace_id": params.trace_id,
            "parent_span_id": params.parent_span_id,
        }
        return await self._agent_task_tracker_service.get_by_task_and_agent(**request)

    @activity.defn(name=AgentTaskTrackerActivityName.UPDATE_AGENT_TASK_TRACKER)
    async def update_agent_task_tracker(
        self, params: UpdateAgentTaskTrackerParams
    ) -> AgentTaskTracker:
        """Update tracker progress/status fields."""
        request = {
            "tracker_id": params.tracker_id,
            "last_processed_event_id": params.last_processed_event_id,
            "status": params.status,
            "status_reason": params.status_reason,
            "trace_id": params.trace_id,
            "parent_span_id": params.parent_span_id,
        }
        return await self._agent_task_tracker_service.update_agent_task_tracker(**request)
diff --git a/src/agentex/lib/core/temporal/activities/adk/agents_activities.py b/src/agentex/lib/core/temporal/activities/adk/agents_activities.py
new file mode 100644
index 000000000..ad51b72eb
--- /dev/null
+++ b/src/agentex/lib/core/temporal/activities/adk/agents_activities.py
@@ -0,0 +1,35 @@
+from enum import Enum
+from typing import Optional
+
+from agentex.lib.core.services.adk.agents import AgentsService
+from agentex.types.agent import Agent
+from temporalio import activity
+
+from agentex.lib.types.tracing import BaseModelWithTraceParams
+from agentex.lib.utils.logging import make_logger
+
+logger = make_logger(__name__)
+
+
class AgentsActivityName(str, Enum):
    """Temporal activity names for agent lookups."""

    GET_AGENT = "get-agent"


class GetAgentParams(BaseModelWithTraceParams):
    """Input for the get-agent activity.

    Presumably either agent_id or agent_name identifies the agent —
    confirm resolution rules against AgentsService.get_agent.
    """

    # Consistency fix: use PEP 604 `str | None` instead of typing.Optional,
    # matching every other params model in this package (this file already
    # uses `Agent | None` in the activity return annotation below).
    agent_id: str | None = None
    agent_name: str | None = None
+
+
class AgentsActivities:
    """Temporal activity wrapper around AgentsService lookups."""

    def __init__(self, agents_service: AgentsService):
        self._agents_service = agents_service

    @activity.defn(name=AgentsActivityName.GET_AGENT)
    async def get_agent(self, params: GetAgentParams) -> Agent | None:
        """Resolve an agent by id or name via AgentsService."""
        request = {
            "agent_id": params.agent_id,
            "agent_name": params.agent_name,
            "trace_id": params.trace_id,
            "parent_span_id": params.parent_span_id,
        }
        return await self._agents_service.get_agent(**request)
+
diff --git a/src/agentex/lib/core/temporal/activities/adk/events_activities.py b/src/agentex/lib/core/temporal/activities/adk/events_activities.py
new file mode 100644
index 000000000..224bd1a60
--- /dev/null
+++ b/src/agentex/lib/core/temporal/activities/adk/events_activities.py
@@ -0,0 +1,50 @@
+from enum import Enum
+
+from temporalio import activity
+
+from agentex.lib.core.services.adk.events import EventsService
+from agentex.types.event import Event
+from agentex.lib.types.tracing import BaseModelWithTraceParams
+from agentex.lib.utils.logging import make_logger
+
+logger = make_logger(__name__)
+
+
class EventsActivityName(str, Enum):
    """Temporal activity names for event operations."""

    GET_EVENT = "get-event"
    LIST_EVENTS = "list-events"


class GetEventParams(BaseModelWithTraceParams):
    """Input for the get-event activity."""

    event_id: str


class ListEventsParams(BaseModelWithTraceParams):
    """Input for the list-events activity."""

    task_id: str
    agent_id: str
    # Resume cursor: presumably only events after this id are returned —
    # confirm against EventsService.list_events.
    last_processed_event_id: str | None = None
    # Maximum number of events to return; None presumably means no cap.
    limit: int | None = None
+
+
class EventsActivities:
    """Temporal activity wrappers around EventsService."""

    def __init__(self, events_service: EventsService):
        self._events_service = events_service

    @activity.defn(name=EventsActivityName.GET_EVENT)
    async def get_event(self, params: GetEventParams) -> Event | None:
        """Fetch a single event by id."""
        request = {
            "event_id": params.event_id,
            "trace_id": params.trace_id,
            "parent_span_id": params.parent_span_id,
        }
        return await self._events_service.get_event(**request)

    @activity.defn(name=EventsActivityName.LIST_EVENTS)
    async def list_events(self, params: ListEventsParams) -> list[Event]:
        """List events for a (task, agent) pair, optionally resuming after a cursor."""
        request = {
            "task_id": params.task_id,
            "agent_id": params.agent_id,
            "last_processed_event_id": params.last_processed_event_id,
            "limit": params.limit,
            "trace_id": params.trace_id,
            "parent_span_id": params.parent_span_id,
        }
        return await self._events_service.list_events(**request)
diff --git a/src/agentex/lib/core/temporal/activities/adk/messages_activities.py b/src/agentex/lib/core/temporal/activities/adk/messages_activities.py
new file mode 100644
index 000000000..8fdad8ddd
--- /dev/null
+++ b/src/agentex/lib/core/temporal/activities/adk/messages_activities.py
@@ -0,0 +1,94 @@
+from enum import Enum
+
+from temporalio import activity
+
+from agentex.lib.core.services.adk.messages import MessagesService
+from agentex.types.task_message import TaskMessage
+from agentex.types.task_message_content import TaskMessageContent
+from agentex.lib.types.tracing import BaseModelWithTraceParams
+from agentex.lib.utils.logging import make_logger
+
+logger = make_logger(__name__)
+
+
class MessagesActivityName(str, Enum):
    """Temporal activity names for task-message operations."""

    CREATE_MESSAGE = "create-message"
    UPDATE_MESSAGE = "update-message"
    CREATE_MESSAGES_BATCH = "create-messages-batch"
    UPDATE_MESSAGES_BATCH = "update-messages-batch"
    LIST_MESSAGES = "list-messages"


class CreateMessageParams(BaseModelWithTraceParams):
    """Input for the create-message activity."""

    task_id: str
    content: TaskMessageContent
    # When True, presumably subscribers are notified of the new message —
    # confirm against MessagesService.create_message.
    emit_updates: bool = True


class UpdateMessageParams(BaseModelWithTraceParams):
    """Input for the update-message activity."""

    task_id: str
    message_id: str
    content: TaskMessageContent


class CreateMessagesBatchParams(BaseModelWithTraceParams):
    """Input for the create-messages-batch activity."""

    task_id: str
    contents: list[TaskMessageContent]
    emit_updates: bool = True


class UpdateMessagesBatchParams(BaseModelWithTraceParams):
    """Input for the update-messages-batch activity."""

    task_id: str
    # Mapping of message_id -> replacement content.
    updates: dict[str, TaskMessageContent]


class ListMessagesParams(BaseModelWithTraceParams):
    """Input for the list-messages activity."""

    task_id: str
    limit: int | None = None
+
+
class MessagesActivities:
    """Temporal activity wrappers around MessagesService.

    NOTE(review): unlike the sibling activity classes, trace_id and
    parent_span_id are not forwarded to the service here — presumably
    MessagesService traces internally; confirm.
    """

    def __init__(self, messages_service: MessagesService):
        self._messages_service = messages_service

    @activity.defn(name=MessagesActivityName.CREATE_MESSAGE)
    async def create_message(self, params: CreateMessageParams) -> TaskMessage:
        """Create a single task message."""
        request = {
            "task_id": params.task_id,
            "content": params.content,
            "emit_updates": params.emit_updates,
        }
        return await self._messages_service.create_message(**request)

    @activity.defn(name=MessagesActivityName.UPDATE_MESSAGE)
    async def update_message(self, params: UpdateMessageParams) -> TaskMessage:
        """Replace the content of an existing task message."""
        request = {
            "task_id": params.task_id,
            "message_id": params.message_id,
            "content": params.content,
        }
        return await self._messages_service.update_message(**request)

    @activity.defn(name=MessagesActivityName.CREATE_MESSAGES_BATCH)
    async def create_messages_batch(
        self, params: CreateMessagesBatchParams
    ) -> list[TaskMessage]:
        """Create several task messages in one call."""
        request = {
            "task_id": params.task_id,
            "contents": params.contents,
            "emit_updates": params.emit_updates,
        }
        return await self._messages_service.create_messages_batch(**request)

    @activity.defn(name=MessagesActivityName.UPDATE_MESSAGES_BATCH)
    async def update_messages_batch(
        self, params: UpdateMessagesBatchParams
    ) -> list[TaskMessage]:
        """Apply a message_id -> content mapping of updates in one call."""
        request = {
            "task_id": params.task_id,
            "updates": params.updates,
        }
        return await self._messages_service.update_messages_batch(**request)

    @activity.defn(name=MessagesActivityName.LIST_MESSAGES)
    async def list_messages(self, params: ListMessagesParams) -> list[TaskMessage]:
        """List messages for a task, optionally capped at `limit`."""
        request = {
            "task_id": params.task_id,
            "limit": params.limit,
        }
        return await self._messages_service.list_messages(**request)
diff --git a/src/agentex/lib/core/temporal/activities/adk/providers/__init__.py b/src/agentex/lib/core/temporal/activities/adk/providers/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/agentex/lib/core/temporal/activities/adk/providers/litellm_activities.py b/src/agentex/lib/core/temporal/activities/adk/providers/litellm_activities.py
new file mode 100644
index 000000000..70bb3c734
--- /dev/null
+++ b/src/agentex/lib/core/temporal/activities/adk/providers/litellm_activities.py
@@ -0,0 +1,71 @@
+from enum import Enum
+
+from temporalio import activity
+
+from agentex.lib.core.services.adk.providers.litellm import LiteLLMService
+from agentex.lib.types.llm_messages import Completion, LLMConfig
+from agentex.types.task_message import TaskMessage
+from agentex.lib.types.tracing import BaseModelWithTraceParams
+from agentex.lib.utils import logging
+
+logger = logging.make_logger(__name__)
+
+
class LiteLLMActivityName(str, Enum):
    """Temporal activity names for LiteLLM chat-completion operations."""

    CHAT_COMPLETION = "chat-completion"
    CHAT_COMPLETION_AUTO_SEND = "chat-completion-auto-send"
    # Note: CHAT_COMPLETION_STREAM is not supported in Temporal due to generator limitations
    CHAT_COMPLETION_STREAM_AUTO_SEND = "chat-completion-stream-auto-send"


class ChatCompletionParams(BaseModelWithTraceParams):
    """Input for the chat-completion activity."""

    llm_config: LLMConfig


class ChatCompletionAutoSendParams(BaseModelWithTraceParams):
    """Input for chat-completion-auto-send (result is also written to the task as a TaskMessage)."""

    task_id: str
    llm_config: LLMConfig


class ChatCompletionStreamAutoSendParams(BaseModelWithTraceParams):
    """Input for chat-completion-stream-auto-send (streamed result is written to the task as a TaskMessage)."""

    task_id: str
    llm_config: LLMConfig
+
+
class LiteLLMActivities:
    """Temporal activity wrappers around LiteLLMService chat completions."""

    def __init__(self, litellm_service: LiteLLMService):
        self._litellm_service = litellm_service

    @activity.defn(name=LiteLLMActivityName.CHAT_COMPLETION)
    async def chat_completion(self, params: ChatCompletionParams) -> Completion:
        """Run a non-streaming chat completion and return the raw Completion."""
        request = {
            "llm_config": params.llm_config,
            "trace_id": params.trace_id,
            "parent_span_id": params.parent_span_id,
        }
        return await self._litellm_service.chat_completion(**request)

    @activity.defn(name=LiteLLMActivityName.CHAT_COMPLETION_AUTO_SEND)
    async def chat_completion_auto_send(
        self, params: ChatCompletionAutoSendParams
    ) -> TaskMessage | None:
        """Non-streaming chat completion with automatic TaskMessage creation."""
        request = {
            "task_id": params.task_id,
            "llm_config": params.llm_config,
            "trace_id": params.trace_id,
            "parent_span_id": params.parent_span_id,
        }
        return await self._litellm_service.chat_completion_auto_send(**request)

    @activity.defn(name=LiteLLMActivityName.CHAT_COMPLETION_STREAM_AUTO_SEND)
    async def chat_completion_stream_auto_send(
        self, params: ChatCompletionStreamAutoSendParams
    ) -> TaskMessage | None:
        """Streaming chat completion with automatic TaskMessage creation."""
        request = {
            "task_id": params.task_id,
            "llm_config": params.llm_config,
            "trace_id": params.trace_id,
            "parent_span_id": params.parent_span_id,
        }
        return await self._litellm_service.chat_completion_stream_auto_send(**request)
diff --git a/src/agentex/lib/core/temporal/activities/adk/providers/openai_activities.py b/src/agentex/lib/core/temporal/activities/adk/providers/openai_activities.py
new file mode 100644
index 000000000..4abd9a396
--- /dev/null
+++ b/src/agentex/lib/core/temporal/activities/adk/providers/openai_activities.py
@@ -0,0 +1,210 @@
+# Standard library imports
+from collections.abc import Callable
+from contextlib import AsyncExitStack, asynccontextmanager
+from enum import Enum
+from typing import Any, Literal
+
+from agents import RunResult, RunResultStreaming
+from agents.mcp import MCPServerStdio, MCPServerStdioParams
+from agents.model_settings import ModelSettings as OAIModelSettings
+from agents.tool import FunctionTool as OAIFunctionTool
+from mcp import StdioServerParameters
+from openai.types.responses.response_includable import ResponseIncludable
+from openai.types.shared.reasoning import Reasoning
+from temporalio import activity
+
+from agentex.lib.core.services.adk.providers.openai import OpenAIService
+
+# Local imports
+from agentex.lib.types.agent_results import (
+ SerializableRunResult,
+ SerializableRunResultStreaming,
+)
+
+# Third-party imports
+from agentex.lib.types.tracing import BaseModelWithTraceParams
+from agentex.lib.utils import logging
+
+logger = logging.make_logger(__name__)
+
+
class OpenAIActivityName(str, Enum):
    """Names of OpenAI agent activities.

    NOTE(review): these values use snake_case while every other activity
    enum in this package uses dash-separated names; renaming now would
    break already-registered workflows, so the inconsistency is only
    flagged here.
    """

    RUN_AGENT = "run_agent"
    RUN_AGENT_AUTO_SEND = "run_agent_auto_send"
    # Note: RUN_AGENT_STREAMED is not supported in Temporal due to generator limitations
    RUN_AGENT_STREAMED_AUTO_SEND = "run_agent_streamed_auto_send"
+
+
class FunctionTool(BaseModelWithTraceParams):
    """Serializable description of an agents-SDK function tool, plus trace params."""

    name: str
    description: str
    params_json_schema: dict[str, Any]
    # Callable that executes the tool when invoked by the agent runtime.
    on_invoke_tool: Callable[[dict[str, Any]], Any]
    strict_json_schema: bool = True
    is_enabled: bool = True

    def to_oai_function_tool(self) -> OAIFunctionTool:
        """Convert to the agents-SDK FunctionTool, dropping the trace-only fields."""
        # Fix: pydantic's model_dump(exclude=...) is typed to take a set
        # (IncEx); passing a list relied on unspecified coercion and fails
        # static type checks. Use a set literal.
        return OAIFunctionTool(**self.model_dump(exclude={"trace_id", "parent_span_id"}))


class ModelSettings(BaseModelWithTraceParams):
    """Mirror of agents.model_settings.ModelSettings, plus trace params."""

    temperature: float | None = None
    top_p: float | None = None
    frequency_penalty: float | None = None
    presence_penalty: float | None = None
    tool_choice: Literal["auto", "required", "none"] | str | None = None
    parallel_tool_calls: bool | None = None
    truncation: Literal["auto", "disabled"] | None = None
    max_tokens: int | None = None
    reasoning: Reasoning | None = None
    metadata: dict[str, str] | None = None
    store: bool | None = None
    include_usage: bool | None = None
    response_include: list[ResponseIncludable] | None = None
    extra_body: dict[str, str] | None = None
    extra_headers: dict[str, str] | None = None
    extra_args: dict[str, Any] | None = None

    def to_oai_model_settings(self) -> OAIModelSettings:
        """Convert to the agents-SDK ModelSettings, dropping the trace-only fields."""
        # Same IncEx fix as above: exclude must be a set, not a list.
        return OAIModelSettings(**self.model_dump(exclude={"trace_id", "parent_span_id"}))
+
+
class RunAgentParams(BaseModelWithTraceParams):
    """Parameters for running an agent without streaming."""

    # Conversation so far, in the agents-SDK input-list format.
    input_list: list[dict[str, Any]]
    # One stdio MCP server is started per entry (see mcp_server_context).
    mcp_server_params: list[StdioServerParameters]
    agent_name: str
    agent_instructions: str
    handoff_description: str | None = None
    # Nested agent definitions this agent may hand off to.
    handoffs: list["RunAgentParams"] | None = None
    model: str | None = None
    model_settings: ModelSettings | None = None
    tools: list[FunctionTool] | None = None
    output_type: Any = None
    tool_use_behavior: Literal["run_llm_again", "stop_on_first_tool"] = "run_llm_again"
    # NOTE(review): not forwarded by OpenAIActivities below — confirm
    # whether OpenAIService consumes this elsewhere.
    mcp_timeout_seconds: int | None = None


class RunAgentAutoSendParams(RunAgentParams):
    """Parameters for running an agent with automatic TaskMessage creation."""

    # Task to attach the auto-created TaskMessages to.
    task_id: str


class RunAgentStreamedAutoSendParams(RunAgentParams):
    """Parameters for running an agent with streaming and automatic TaskMessage creation."""

    task_id: str
+
+
@asynccontextmanager
async def mcp_server_context(mcp_server_params: list[StdioServerParameters]):
    """Start one stdio MCP server per parameter set and yield them all.

    Servers are entered through a single AsyncExitStack so that every
    successfully started server is shut down on exit, even if a later one
    fails to start.
    """
    servers = [
        MCPServerStdio(
            name=f"Server: {params.command}",
            params=MCPServerStdioParams(**params.model_dump()),
            cache_tools_list=True,
            client_session_timeout_seconds=60,
        )
        for params in mcp_server_params
    ]

    async with AsyncExitStack() as stack:
        for server in servers:
            await stack.enter_async_context(server)
        yield servers
+
+
class OpenAIActivities:
    """Activities for OpenAI agent operations."""

    def __init__(self, openai_service: OpenAIService):
        self._openai_service = openai_service

    @staticmethod
    def _service_kwargs(params: RunAgentParams) -> dict[str, Any]:
        """Map RunAgentParams onto the keyword arguments shared by every service call."""
        return {
            "input_list": params.input_list,
            "mcp_server_params": params.mcp_server_params,
            "agent_name": params.agent_name,
            "agent_instructions": params.agent_instructions,
            "trace_id": params.trace_id,
            "parent_span_id": params.parent_span_id,
            "handoff_description": params.handoff_description,
            "handoffs": params.handoffs,
            "model": params.model,
            "model_settings": params.model_settings,
            "tools": params.tools,
            "output_type": params.output_type,
            "tool_use_behavior": params.tool_use_behavior,
        }

    @activity.defn(name=OpenAIActivityName.RUN_AGENT)
    async def run_agent(self, params: RunAgentParams) -> SerializableRunResult:
        """Run an agent without streaming or TaskMessage creation."""
        result = await self._openai_service.run_agent(**self._service_kwargs(params))
        return self._to_serializable_run_result(result)

    @activity.defn(name=OpenAIActivityName.RUN_AGENT_AUTO_SEND)
    async def run_agent_auto_send(
        self, params: RunAgentAutoSendParams
    ) -> SerializableRunResult:
        """Run an agent with automatic TaskMessage creation."""
        result = await self._openai_service.run_agent_auto_send(
            task_id=params.task_id,
            **self._service_kwargs(params),
        )
        return self._to_serializable_run_result(result)

    @activity.defn(name=OpenAIActivityName.RUN_AGENT_STREAMED_AUTO_SEND)
    async def run_agent_streamed_auto_send(
        self, params: RunAgentStreamedAutoSendParams
    ) -> SerializableRunResultStreaming:
        """Run an agent with streaming and automatic TaskMessage creation."""
        result = await self._openai_service.run_agent_streamed_auto_send(
            task_id=params.task_id,
            **self._service_kwargs(params),
        )
        return self._to_serializable_run_result_streaming(result)

    @staticmethod
    def _to_serializable_run_result(result: RunResult) -> SerializableRunResult:
        """Convert RunResult to SerializableRunResult."""
        return SerializableRunResult(
            final_output=result.final_output,
            final_input_list=result.to_input_list(),
        )

    @staticmethod
    def _to_serializable_run_result_streaming(
        result: RunResultStreaming,
    ) -> SerializableRunResultStreaming:
        """Convert RunResultStreaming to SerializableRunResultStreaming."""
        return SerializableRunResultStreaming(
            final_output=result.final_output,
            final_input_list=result.to_input_list(),
        )
diff --git a/src/agentex/lib/core/temporal/activities/adk/providers/sgp_activities.py b/src/agentex/lib/core/temporal/activities/adk/providers/sgp_activities.py
new file mode 100644
index 000000000..98e93af6c
--- /dev/null
+++ b/src/agentex/lib/core/temporal/activities/adk/providers/sgp_activities.py
@@ -0,0 +1,42 @@
+from enum import Enum
+
+from temporalio import activity
+
+from agentex.lib.core.services.adk.providers.sgp import SGPService
+from agentex.lib.types.files import FileContentResponse
+from agentex.lib.types.tracing import BaseModelWithTraceParams
+from agentex.lib.utils.logging import make_logger
+
+logger = make_logger(__name__)
+
+
class SGPActivityName(str, Enum):
    """Temporal activity names for SGP file operations."""

    DOWNLOAD_FILE_CONTENT = "download-file-content"


class DownloadFileParams(BaseModelWithTraceParams):
    """Input for the download-file-content activity."""

    file_id: str
    # Filename associated with the download — presumably used for
    # mime-type detection; confirm against SGPService.download_file_content.
    filename: str
+
+
class SGPActivities:
    """Temporal activity wrappers around SGPService."""

    def __init__(self, sgp_service: SGPService):
        # Consistency fix: hold the service on a leading-underscore
        # attribute like every other activities class in this package; the
        # holder is an internal implementation detail.
        self._sgp_service = sgp_service

    @activity.defn(name=SGPActivityName.DOWNLOAD_FILE_CONTENT)
    async def download_file_content(self, params: DownloadFileParams) -> FileContentResponse:
        """
        Download file content from SGP.

        Args:
            params: DownloadFileParams containing file_id and filename.

        Returns:
            FileContentResponse with mime_type and base64_content for constructing LLM input.
        """
        return await self._sgp_service.download_file_content(
            file_id=params.file_id,
            filename=params.filename,
            trace_id=params.trace_id,
            parent_span_id=params.parent_span_id,
        )
diff --git a/src/agentex/lib/core/temporal/activities/adk/state_activities.py b/src/agentex/lib/core/temporal/activities/adk/state_activities.py
new file mode 100644
index 000000000..4b892c94c
--- /dev/null
+++ b/src/agentex/lib/core/temporal/activities/adk/state_activities.py
@@ -0,0 +1,85 @@
+from enum import Enum
+from typing import Any
+
+from temporalio import activity
+
+from agentex.lib.core.services.adk.state import StateService
+from agentex.types.state import State
+from agentex.lib.types.tracing import BaseModelWithTraceParams
+from agentex.lib.utils.logging import make_logger
+
+logger = make_logger(__name__)
+
+
class StateActivityName(str, Enum):
    """Temporal activity names registered by ``StateActivities`` (task-state CRUD)."""

    CREATE_STATE = "create-state"
    GET_STATE = "get-state"
    UPDATE_STATE = "update-state"
    DELETE_STATE = "delete-state"
+
+
class CreateStateParams(BaseModelWithTraceParams):
    """Parameters for the create-state activity."""

    task_id: str  # task the state belongs to
    agent_id: str  # agent that owns the state
    state: dict[str, Any]  # JSON-serializable state payload
+
+
class GetStateParams(BaseModelWithTraceParams):
    """Parameters for the get-state activity.

    All identifiers are optional; presumably either ``state_id`` or the
    (``task_id``, ``agent_id``) pair selects the record -- confirm against
    ``StateService.get_state``.
    """

    state_id: str | None = None
    task_id: str | None = None
    agent_id: str | None = None
+
+
class UpdateStateParams(BaseModelWithTraceParams):
    """Parameters for the update-state activity."""

    state_id: str  # identifies the record to update
    task_id: str
    agent_id: str
    state: dict[str, Any]  # replacement state payload
+
+
class DeleteStateParams(BaseModelWithTraceParams):
    """Parameters for the delete-state activity."""

    state_id: str  # identifies the record to delete
+
+
class StateActivities:
    """Temporal activity wrappers around ``StateService`` (task-state CRUD)."""

    def __init__(self, state_service: StateService):
        self._state_service = state_service

    @activity.defn(name=StateActivityName.CREATE_STATE)
    async def create_state(self, params: CreateStateParams) -> State:
        """Create a state record for the given task/agent pair."""
        call_args = {
            "task_id": params.task_id,
            "agent_id": params.agent_id,
            "state": params.state,
            "trace_id": params.trace_id,
            "parent_span_id": params.parent_span_id,
        }
        return await self._state_service.create_state(**call_args)

    @activity.defn(name=StateActivityName.GET_STATE)
    async def get_state(self, params: GetStateParams) -> State | None:
        """Fetch a state record; returns None when the service finds nothing."""
        call_args = {
            "state_id": params.state_id,
            "task_id": params.task_id,
            "agent_id": params.agent_id,
            "trace_id": params.trace_id,
            "parent_span_id": params.parent_span_id,
        }
        return await self._state_service.get_state(**call_args)

    @activity.defn(name=StateActivityName.UPDATE_STATE)
    async def update_state(self, params: UpdateStateParams) -> State:
        """Replace an existing state record with the supplied payload."""
        call_args = {
            "state_id": params.state_id,
            "task_id": params.task_id,
            "agent_id": params.agent_id,
            "state": params.state,
            "trace_id": params.trace_id,
            "parent_span_id": params.parent_span_id,
        }
        return await self._state_service.update_state(**call_args)

    @activity.defn(name=StateActivityName.DELETE_STATE)
    async def delete_state(self, params: DeleteStateParams) -> State:
        """Delete the state record identified by ``state_id``."""
        call_args = {
            "state_id": params.state_id,
            "trace_id": params.trace_id,
            "parent_span_id": params.parent_span_id,
        }
        return await self._state_service.delete_state(**call_args)
diff --git a/src/agentex/lib/core/temporal/activities/adk/streaming_activities.py b/src/agentex/lib/core/temporal/activities/adk/streaming_activities.py
new file mode 100644
index 000000000..2c977f5d3
--- /dev/null
+++ b/src/agentex/lib/core/temporal/activities/adk/streaming_activities.py
@@ -0,0 +1,33 @@
+from enum import Enum
+
+from temporalio import activity
+
+from agentex.lib.core.services.adk.streaming import StreamingService
+from agentex.lib.types.task_message_updates import TaskMessageUpdate
+from agentex.lib.utils.logging import make_logger
+from agentex.lib.utils.model_utils import BaseModel
+from agentex.lib.utils.temporal import heartbeat_if_in_workflow
+
+logger = make_logger(__name__)
+
+
class StreamingActivityName(str, Enum):
    """Temporal activity names registered by ``StreamingActivities``."""

    STREAM_UPDATE = "stream-update"
+
+
class StreamUpdateParams(BaseModel):
    """Parameters for the stream-update activity."""

    update: TaskMessageUpdate  # update payload forwarded to StreamingService
+
+
class StreamingActivities:
    """
    Temporal activities for streaming events to clients (ADK pattern).
    """

    def __init__(self, streaming_service: StreamingService):
        self._streaming_service = streaming_service

    @activity.defn(name=StreamingActivityName.STREAM_UPDATE)
    async def stream_update(self, params: StreamUpdateParams) -> TaskMessageUpdate | None:
        """Forward a task-message update to the streaming service.

        Heartbeats first (when running inside a workflow context, per the
        helper's name) so long-lived streaming activities stay alive.
        """
        heartbeat_if_in_workflow("stream update")
        result = await self._streaming_service.stream_update(update=params.update)
        return result
diff --git a/src/agentex/lib/core/temporal/activities/adk/tasks_activities.py b/src/agentex/lib/core/temporal/activities/adk/tasks_activities.py
new file mode 100644
index 000000000..6ce72a936
--- /dev/null
+++ b/src/agentex/lib/core/temporal/activities/adk/tasks_activities.py
@@ -0,0 +1,48 @@
+from enum import Enum
+
+from temporalio import activity
+
+from agentex.lib.core.services.adk.tasks import TasksService
+from agentex.types.task import Task
+from agentex.lib.types.tracing import BaseModelWithTraceParams
+from agentex.lib.utils.logging import make_logger
+
+logger = make_logger(__name__)
+
+
class TasksActivityName(str, Enum):
    """Temporal activity names registered by ``TasksActivities``."""

    GET_TASK = "get-task"
    DELETE_TASK = "delete-task"
+
+
class GetTaskParams(BaseModelWithTraceParams):
    """Parameters for the get-task activity.

    Presumably exactly one of ``task_id`` / ``task_name`` should be set --
    confirm against ``TasksService.get_task``.
    """

    task_id: str | None = None
    task_name: str | None = None
+
+
class DeleteTaskParams(BaseModelWithTraceParams):
    """Parameters for the delete-task activity.

    Presumably exactly one of ``task_id`` / ``task_name`` should be set --
    confirm against ``TasksService.delete_task``.
    """

    task_id: str | None = None
    task_name: str | None = None
+
+
class TasksActivities:
    """Temporal activity wrappers around ``TasksService``."""

    def __init__(self, tasks_service: TasksService):
        self._tasks_service = tasks_service

    @activity.defn(name=TasksActivityName.GET_TASK)
    async def get_task(self, params: GetTaskParams) -> Task | None:
        """Look up a task by id or name; returns None when not found."""
        call_args = {
            "task_id": params.task_id,
            "task_name": params.task_name,
            "trace_id": params.trace_id,
            "parent_span_id": params.parent_span_id,
        }
        return await self._tasks_service.get_task(**call_args)

    @activity.defn(name=TasksActivityName.DELETE_TASK)
    async def delete_task(self, params: DeleteTaskParams) -> Task:
        """Delete a task selected by id or name."""
        call_args = {
            "task_id": params.task_id,
            "task_name": params.task_name,
            "trace_id": params.trace_id,
            "parent_span_id": params.parent_span_id,
        }
        return await self._tasks_service.delete_task(**call_args)
diff --git a/src/agentex/lib/core/temporal/activities/adk/tracing_activities.py b/src/agentex/lib/core/temporal/activities/adk/tracing_activities.py
new file mode 100644
index 000000000..56bde068c
--- /dev/null
+++ b/src/agentex/lib/core/temporal/activities/adk/tracing_activities.py
@@ -0,0 +1,55 @@
+from enum import Enum
+from typing import Any
+
+from temporalio import activity
+
+from agentex.lib.core.services.adk.tracing import TracingService
+from agentex.types.span import Span
+from agentex.lib.utils.logging import make_logger
+from agentex.lib.utils.model_utils import BaseModel
+
+logger = make_logger(__name__)
+
+
class TracingActivityName(str, Enum):
    """Temporal activity names registered by ``TracingActivities``."""

    START_SPAN = "start-span"
    END_SPAN = "end-span"
+
+
class StartSpanParams(BaseModel):
    """Parameters for the start-span activity."""

    trace_id: str  # trace the new span belongs to
    parent_id: str | None = None  # optional parent span id
    name: str  # span name
    input: list[Any] | dict[str, Any] | BaseModel | None = None  # span input payload
    data: list[Any] | dict[str, Any] | BaseModel | None = None  # extra span data
+
+
class EndSpanParams(BaseModel):
    """Parameters for the end-span activity."""

    trace_id: str  # trace the span belongs to
    span: Span  # the span object to close
+
+
class TracingActivities:
    """
    Temporal activities for tracing (spans), ADK pattern.
    """

    def __init__(self, tracing_service: TracingService):
        self._tracing_service = tracing_service

    @activity.defn(name=TracingActivityName.START_SPAN)
    async def start_span(self, params: StartSpanParams) -> Span | None:
        """Open a span via the tracing service and return it (may be None)."""
        call_args = {
            "trace_id": params.trace_id,
            "parent_id": params.parent_id,
            "name": params.name,
            "input": params.input,
            "data": params.data,
        }
        return await self._tracing_service.start_span(**call_args)

    @activity.defn(name=TracingActivityName.END_SPAN)
    async def end_span(self, params: EndSpanParams) -> Span:
        """Close the given span via the tracing service."""
        return await self._tracing_service.end_span(
            trace_id=params.trace_id, span=params.span
        )
diff --git a/src/agentex/lib/core/temporal/activities/adk/utils/__init__.py b/src/agentex/lib/core/temporal/activities/adk/utils/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/agentex/lib/core/temporal/activities/adk/utils/templating_activities.py b/src/agentex/lib/core/temporal/activities/adk/utils/templating_activities.py
new file mode 100644
index 000000000..3c284c7a7
--- /dev/null
+++ b/src/agentex/lib/core/temporal/activities/adk/utils/templating_activities.py
@@ -0,0 +1,41 @@
+from enum import Enum
+from typing import Any
+
+from temporalio import activity
+
+from agentex.lib.core.services.adk.utils.templating import TemplatingService
+from agentex.lib.types.tracing import BaseModelWithTraceParams
+
+
class JinjaActivityName(str, Enum):
    """Temporal activity names registered by ``TemplatingActivities``."""

    RENDER_JINJA = "render-jinja"
+
+
class RenderJinjaParams(BaseModelWithTraceParams):
    """Parameters for the render-jinja activity."""

    template: str  # Jinja template source text
    variables: dict[str, Any]  # variables exposed to the template
+
+
class TemplatingActivities:
    """Temporal activity wrappers around ``TemplatingService``."""

    def __init__(self, templating_service: TemplatingService):
        self.templating_service = templating_service

    @activity.defn(name=JinjaActivityName.RENDER_JINJA)
    async def render_jinja(self, params: RenderJinjaParams) -> str:
        """
        Render a Jinja template with the provided variables.

        Args:
            params: RenderJinjaParams with the template string and variables.

        Returns:
            The rendered template as a string.
        """
        call_args = {
            "template": params.template,
            "variables": params.variables,
            "trace_id": params.trace_id,
            "parent_span_id": params.parent_span_id,
        }
        return await self.templating_service.render_jinja(**call_args)
diff --git a/src/agentex/lib/core/temporal/services/__init__.py b/src/agentex/lib/core/temporal/services/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/agentex/lib/core/temporal/services/temporal_task_service.py b/src/agentex/lib/core/temporal/services/temporal_task_service.py
new file mode 100644
index 000000000..0671af0ef
--- /dev/null
+++ b/src/agentex/lib/core/temporal/services/temporal_task_service.py
@@ -0,0 +1,69 @@
+from agentex.lib.core.clients.temporal.temporal_client import TemporalClient
+from agentex.lib.core.clients.temporal.types import WorkflowState
+from agentex.lib.core.temporal.types.workflow import SignalName
+from agentex.lib.environment_variables import EnvironmentVariables
+from agentex.lib.types.acp import CreateTaskParams
+from agentex.lib.types.acp import SendEventParams
+from agentex.types.agent import Agent
+from agentex.types.event import Event
+from agentex.types.task import Task
+
+
class TemporalTaskService:
    """
    Submits agent tasks to the Temporal runtime for execution and manages
    their lifecycle (status queries, events, cancellation, termination).

    The workflow ID is always the task ID.
    """

    def __init__(
        self,
        temporal_client: TemporalClient,
        env_vars: EnvironmentVariables,
    ):
        self._temporal_client = temporal_client
        self._env_vars = env_vars

    async def submit_task(self, agent: Agent, task: Task) -> str:
        """Start the configured workflow for *task* and return the workflow ID."""
        create_params = CreateTaskParams(agent=agent, task=task, params=None)
        return await self._temporal_client.start_workflow(
            workflow=self._env_vars.WORKFLOW_NAME,
            arg=create_params,
            id=task.id,
            task_queue=self._env_vars.WORKFLOW_TASK_QUEUE,
        )

    async def get_state(self, task_id: str) -> WorkflowState:
        """Return the workflow status for the given task."""
        return await self._temporal_client.get_workflow_status(workflow_id=task_id)

    async def send_event(self, agent: Agent, task: Task, event: Event) -> None:
        """Signal the task's running workflow with an event payload."""
        event_params = SendEventParams(agent=agent, task=task, event=event)
        return await self._temporal_client.send_signal(
            workflow_id=task.id,
            signal=SignalName.RECEIVE_EVENT.value,
            payload=event_params.model_dump(),
        )

    async def cancel(self, task_id: str) -> None:
        """Cancel the task's workflow via the Temporal client."""
        return await self._temporal_client.cancel_workflow(workflow_id=task_id)

    async def terminate(self, task_id: str) -> None:
        """Terminate the task's workflow via the Temporal client."""
        return await self._temporal_client.terminate_workflow(workflow_id=task_id)
diff --git a/src/agentex/lib/core/temporal/types/__init__.py b/src/agentex/lib/core/temporal/types/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/agentex/lib/core/temporal/types/workflow.py b/src/agentex/lib/core/temporal/types/workflow.py
new file mode 100644
index 000000000..973bb52c2
--- /dev/null
+++ b/src/agentex/lib/core/temporal/types/workflow.py
@@ -0,0 +1,5 @@
+from enum import Enum
+
+
class SignalName(str, Enum):
    """Temporal workflow signal names used by the agentex runtime."""

    RECEIVE_EVENT = "receive_event"
diff --git a/src/agentex/lib/core/temporal/workers/__init__.py b/src/agentex/lib/core/temporal/workers/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/agentex/lib/core/temporal/workers/worker.py b/src/agentex/lib/core/temporal/workers/worker.py
new file mode 100644
index 000000000..b3411d9ea
--- /dev/null
+++ b/src/agentex/lib/core/temporal/workers/worker.py
@@ -0,0 +1,162 @@
+import dataclasses
+import datetime
+import os
+import uuid
+from collections.abc import Callable
+from concurrent.futures import ThreadPoolExecutor
+from typing import Any
+
+from aiohttp import web
+from temporalio.client import Client
+from temporalio.converter import (
+ AdvancedJSONEncoder,
+ CompositePayloadConverter,
+ DataConverter,
+ DefaultPayloadConverter,
+ JSONPlainPayloadConverter,
+ JSONTypeConverter,
+ _JSONTypeConverterUnhandled,
+)
+from temporalio.runtime import OpenTelemetryConfig, Runtime, TelemetryConfig
+from temporalio.worker import (
+ UnsandboxedWorkflowRunner,
+ Worker,
+)
+
+from agentex.lib.utils.logging import make_logger
+
+logger = make_logger(__name__)
+
+
class DateTimeJSONEncoder(AdvancedJSONEncoder):
    """JSON encoder that serializes ``datetime`` values as ISO-8601 strings."""

    def default(self, o: Any) -> Any:
        if not isinstance(o, datetime.datetime):
            return super().default(o)
        return o.isoformat()
+
+
class DateTimeJSONTypeConverter(JSONTypeConverter):
    """JSON type converter that revives ISO-8601 strings into ``datetime``."""

    def to_typed_value(
        self, hint: type, value: Any
    ) -> Any | None | _JSONTypeConverterUnhandled:
        if hint != datetime.datetime:
            # Not a datetime hint: defer to the next converter in the chain.
            return JSONTypeConverter.Unhandled
        return datetime.datetime.fromisoformat(value)
+
+
class DateTimePayloadConverter(CompositePayloadConverter):
    """Default Temporal payload converter with datetime-aware JSON handling."""

    def __init__(self) -> None:
        datetime_json_converter = JSONPlainPayloadConverter(
            encoder=DateTimeJSONEncoder,
            custom_type_converters=[DateTimeJSONTypeConverter()],
        )
        # Preserve the default converter ordering, swapping in our JSON converter.
        converters = []
        for default_converter in DefaultPayloadConverter.default_encoding_payload_converters:
            if isinstance(default_converter, JSONPlainPayloadConverter):
                converters.append(datetime_json_converter)
            else:
                converters.append(default_converter)
        super().__init__(*converters)
+
+
# Module-wide Temporal data converter: the default converter with its payload
# converter class replaced by the datetime-aware one defined above.
custom_data_converter = dataclasses.replace(
    DataConverter.default,
    payload_converter_class=DateTimePayloadConverter,
)
+
+
async def get_temporal_client(temporal_address: str, metrics_url: str | None = None) -> Client:
    """
    Connect to Temporal using the datetime-aware data converter.

    Args:
        temporal_address: host:port of the Temporal frontend.
        metrics_url: Optional OpenTelemetry collector URL. When provided, the
            client is built on a Runtime that exports metrics to it.

    Returns:
        A connected Temporal ``Client``.
    """
    # Fix: the parameter was annotated ``str = None``; it is genuinely optional.
    if not metrics_url:
        return await Client.connect(
            target_host=temporal_address, data_converter=custom_data_converter
        )
    runtime = Runtime(
        telemetry=TelemetryConfig(metrics=OpenTelemetryConfig(url=metrics_url))
    )
    return await Client.connect(
        target_host=temporal_address,
        data_converter=custom_data_converter,
        runtime=runtime,
    )
+
+
class AgentexWorker:
    """Temporal worker wrapper that also serves an aiohttp readiness endpoint.

    Runs the given workflow and activities on ``task_queue`` and exposes
    ``GET /readyz`` so orchestrators (e.g. Kubernetes) can probe health.
    """

    def __init__(
        self,
        task_queue: str,
        max_workers: int = 10,
        max_concurrent_activities: int = 10,
        health_check_port: int = 80,
    ):
        self.task_queue = task_queue
        # NOTE(review): never read or written elsewhere in this class -- confirm
        # external callers before removing.
        self.activity_handles = []
        self.max_workers = max_workers  # thread pool size for activity execution
        self.max_concurrent_activities = max_concurrent_activities
        self.health_check_server_running = False
        self.healthy = False  # value reported by the /readyz endpoint
        self.health_check_port = health_check_port

    async def run(
        self,
        activities: list[Callable],
        workflow: type,
    ):
        """Start the health server, connect to Temporal, and run the worker.

        Blocks for the lifetime of the worker (``worker.run()`` only returns
        on shutdown).

        Args:
            activities: Activity callables to register with the worker.
            workflow: Workflow class to register.
        """
        await self.start_health_check_server()
        # TEMPORAL_ADDRESS defaults to a local dev server address.
        temporal_client = await get_temporal_client(
            temporal_address=os.environ.get("TEMPORAL_ADDRESS", "localhost:7233"),
        )
        worker = Worker(
            client=temporal_client,
            task_queue=self.task_queue,
            activity_executor=ThreadPoolExecutor(max_workers=self.max_workers),
            workflows=[workflow],
            activities=activities,
            # Runs workflows without Temporal's workflow sandbox.
            workflow_runner=UnsandboxedWorkflowRunner(),
            max_concurrent_activities=self.max_concurrent_activities,
            # NOTE(review): fresh random build id per process -- confirm this is
            # intended vs. a stable, versioned build id.
            build_id=str(uuid.uuid4()),
        )

        logger.info(f"Starting workers for task queue: {self.task_queue}")
        # Eagerly set the worker status to healthy
        self.healthy = True
        logger.info(f"Running workers for task queue: {self.task_queue}")
        await worker.run()

    async def _health_check(self):
        # Response body is just the boolean health flag serialized as JSON.
        return web.json_response(self.healthy)

    async def start_health_check_server(self):
        """Start the readiness server once; fall back to port+1 if the port is taken."""
        if not self.health_check_server_running:
            app = web.Application()
            app.router.add_get("/readyz", lambda request: self._health_check())

            # Disable access logging
            runner = web.AppRunner(app, access_log=None)
            await runner.setup()

            try:
                site = web.TCPSite(runner, "0.0.0.0", self.health_check_port)
                await site.start()
                logger.info(
                    f"Health check server running on http://0.0.0.0:{self.health_check_port}/readyz"
                )
                self.health_check_server_running = True
            except OSError as e:
                logger.error(
                    f"Failed to start health check server on port {self.health_check_port}: {e}"
                )
                # Try alternative port if default fails
                try:
                    alt_port = self.health_check_port + 1
                    site = web.TCPSite(runner, "0.0.0.0", alt_port)
                    await site.start()
                    logger.info(
                        f"Health check server running on alternative port http://0.0.0.0:{alt_port}/readyz"
                    )
                    self.health_check_server_running = True
                except OSError as e:
                    logger.error(
                        f"Failed to start health check server on alternative port {alt_port}: {e}"
                    )
                    raise
diff --git a/src/agentex/lib/core/temporal/workflows/workflow.py b/src/agentex/lib/core/temporal/workflows/workflow.py
new file mode 100644
index 000000000..37325ee87
--- /dev/null
+++ b/src/agentex/lib/core/temporal/workflows/workflow.py
@@ -0,0 +1,26 @@
+from abc import ABC, abstractmethod
+
+from temporalio import workflow
+
+from agentex.lib.core.temporal.types.workflow import SignalName
+from agentex.lib.types.acp import CreateTaskParams, SendEventParams
+from agentex.lib.utils.logging import make_logger
+
+logger = make_logger(__name__)
+
+
class BaseWorkflow(ABC):
    """Abstract base for agentex Temporal workflows.

    Subclasses implement the task-creation entrypoint and the
    ``receive_event`` signal handler.
    """

    def __init__(
        self,
        display_name: str,
    ):
        # Human-readable workflow name; presumably for logging/UI -- confirm usage.
        self.display_name = display_name

    @abstractmethod
    @workflow.signal(name=SignalName.RECEIVE_EVENT)
    async def on_task_event_send(self, params: SendEventParams) -> None:
        """Handle the ``receive_event`` Temporal signal for a running task."""
        raise NotImplementedError

    @abstractmethod
    async def on_task_create(self, params: CreateTaskParams) -> None:
        """Run the workflow logic for a newly created task."""
        raise NotImplementedError
diff --git a/src/agentex/lib/core/tracing/__init__.py b/src/agentex/lib/core/tracing/__init__.py
new file mode 100644
index 000000000..f2f495f2f
--- /dev/null
+++ b/src/agentex/lib/core/tracing/__init__.py
@@ -0,0 +1,5 @@
+from agentex.lib.core.tracing.trace import AsyncTrace, Trace
+from agentex.lib.core.tracing.tracer import AsyncTracer, Tracer
+from agentex.types.span import Span
+
+__all__ = ["Trace", "AsyncTrace", "Span", "Tracer", "AsyncTracer"]
diff --git a/src/agentex/lib/core/tracing/processors/agentex_tracing_processor.py b/src/agentex/lib/core/tracing/processors/agentex_tracing_processor.py
new file mode 100644
index 000000000..7630f5faf
--- /dev/null
+++ b/src/agentex/lib/core/tracing/processors/agentex_tracing_processor.py
@@ -0,0 +1,117 @@
+from typing import Any, Dict, override
+
+from agentex import Agentex, AsyncAgentex
+from agentex.lib.core.tracing.processors.tracing_processor_interface import (
+ AsyncTracingProcessor,
+ SyncTracingProcessor,
+)
+from agentex.types.span import Span
+from agentex.lib.types.tracing import AgentexTracingProcessorConfig
+
+
class AgentexSyncTracingProcessor(SyncTracingProcessor):
    """Sync tracing processor that persists spans via the Agentex API."""

    def __init__(self, config: AgentexTracingProcessorConfig):
        # NOTE: config is currently unused; the default client configuration applies.
        self.client = Agentex()

    @override
    def on_span_start(self, span: Span) -> None:
        """Create the span record as soon as the span starts."""
        self.client.spans.create(
            name=span.name,
            start_time=span.start_time,
            end_time=span.end_time,
            trace_id=span.trace_id,
            id=span.id,
            data=span.data,
            input=span.input,
            output=span.output,
            parent_id=span.parent_id,
        )

    @override
    def on_span_end(self, span: Span) -> None:
        """Update the span record from the finished span.

        Fix: the original built an ``update`` dict field-by-field that was
        never used; only the ``model_dump`` kwargs were sent. The dead code
        is removed.
        """
        self.client.spans.update(
            span.id,
            **span.model_dump(
                mode="json",
                exclude={"id"},
                exclude_defaults=True,
                exclude_none=True,
                exclude_unset=True,
            ),
        )

    @override
    def shutdown(self) -> None:
        """No-op: spans are sent synchronously per call; nothing is buffered."""
        pass
+
+
class AgentexAsyncTracingProcessor(AsyncTracingProcessor):
    """Async tracing processor that persists spans via the Agentex API."""

    def __init__(self, config: AgentexTracingProcessorConfig):
        # NOTE: config is currently unused; the default client configuration applies.
        self.client = AsyncAgentex()

    @override
    async def on_span_start(self, span: Span) -> None:
        """Create the span record as soon as the span starts."""
        await self.client.spans.create(
            name=span.name,
            start_time=span.start_time,
            end_time=span.end_time,
            id=span.id,
            trace_id=span.trace_id,
            parent_id=span.parent_id,
            input=span.input,
            output=span.output,
            data=span.data,
        )

    @override
    async def on_span_end(self, span: Span) -> None:
        """Update the span record from the finished span.

        Fix: the original built an ``update`` dict field-by-field that was
        never used; only the ``model_dump`` kwargs were sent. The dead code
        is removed.
        """
        await self.client.spans.update(
            span.id,
            **span.model_dump(
                mode="json",
                exclude={"id"},
                exclude_defaults=True,
                exclude_none=True,
                exclude_unset=True,
            ),
        )

    @override
    async def shutdown(self) -> None:
        """No-op: spans are sent per call; nothing is buffered."""
        pass
diff --git a/src/agentex/lib/core/tracing/processors/sgp_tracing_processor.py b/src/agentex/lib/core/tracing/processors/sgp_tracing_processor.py
new file mode 100644
index 000000000..f614f78e9
--- /dev/null
+++ b/src/agentex/lib/core/tracing/processors/sgp_tracing_processor.py
@@ -0,0 +1,119 @@
+from typing import override
+
+import scale_gp_beta.lib.tracing as tracing
+from scale_gp_beta import AsyncSGPClient, SGPClient
+from scale_gp_beta.lib.tracing import create_span, flush_queue
+from scale_gp_beta.lib.tracing.span import Span as SGPSpan
+
+from agentex.lib.core.tracing.processors.tracing_processor_interface import (
+ AsyncTracingProcessor,
+ SyncTracingProcessor,
+)
+from agentex.types.span import Span
+from agentex.lib.types.tracing import SGPTracingProcessorConfig
+from agentex.lib.utils.logging import make_logger
+
+logger = make_logger(__name__)
+
+
class SGPSyncTracingProcessor(SyncTracingProcessor):
    """Sync tracing processor that mirrors spans into SGP via its tracing SDK."""

    def __init__(self, config: SGPTracingProcessorConfig):
        # Tracing is disabled when either credential is the empty string.
        disabled = config.sgp_api_key == "" or config.sgp_account_id == ""
        tracing.init(
            SGPClient(api_key=config.sgp_api_key, account_id=config.sgp_account_id),
            disabled=disabled,
        )
        # In-flight SGP spans keyed by agentex span id, so on_span_end can finish them.
        self._spans: dict[str, SGPSpan] = {}

    @override
    def on_span_start(self, span: Span) -> None:
        """Create the SGP counterpart of *span* and flush it without blocking."""
        sgp_span = create_span(
            name=span.name,
            span_id=span.id,
            parent_id=span.parent_id,
            trace_id=span.trace_id,
            input=span.input,
            output=span.output,
            metadata=span.data,
        )
        sgp_span.start_time = span.start_time.isoformat()
        sgp_span.flush(blocking=False)

        self._spans[span.id] = sgp_span

    @override
    def on_span_end(self, span: Span) -> None:
        """Finish the previously started SGP span; warn and skip if unknown."""
        sgp_span = self._spans.get(span.id)
        if sgp_span is None:
            logger.warning(
                f"Span {span.id} not found in stored spans, skipping span end"
            )
            return

        sgp_span.output = span.output
        sgp_span.metadata = span.data
        # Assumes the caller set end_time before ending the span -- TODO confirm;
        # a None end_time would raise AttributeError here.
        sgp_span.end_time = span.end_time.isoformat()
        sgp_span.flush(blocking=False)

    @override
    def shutdown(self) -> None:
        """Drop local bookkeeping and flush the SGP tracing queue."""
        self._spans.clear()
        flush_queue()
+
+
class SGPAsyncTracingProcessor(AsyncTracingProcessor):
    """Async tracing processor that mirrors spans into SGP via batch upserts."""

    def __init__(self, config: SGPTracingProcessorConfig):
        # Tracing is disabled when either credential is the empty string;
        # in that case no client is constructed.
        self.disabled = config.sgp_api_key == "" or config.sgp_account_id == ""
        # In-flight SGP spans keyed by agentex span id, so on_span_end can finish them.
        self._spans: dict[str, SGPSpan] = {}
        self.sgp_async_client = (
            AsyncSGPClient(api_key=config.sgp_api_key, account_id=config.sgp_account_id)
            if not self.disabled
            else None
        )

    @override
    async def on_span_start(self, span: Span) -> None:
        """Create the SGP counterpart of *span* and upsert it (unless disabled)."""
        sgp_span = create_span(
            name=span.name,
            span_id=span.id,
            parent_id=span.parent_id,
            trace_id=span.trace_id,
            input=span.input,
            output=span.output,
            metadata=span.data,
        )
        sgp_span.start_time = span.start_time.isoformat()

        # Fix: track the span even when disabled. Previously the early return
        # skipped this, so on_span_end logged a spurious "not found" warning
        # for every span while disabled.
        self._spans[span.id] = sgp_span

        if self.disabled:
            return
        await self.sgp_async_client.spans.upsert_batch(
            items=[sgp_span.to_request_params()]
        )

    @override
    async def on_span_end(self, span: Span) -> None:
        """Finish the previously started SGP span and upsert it (unless disabled)."""
        sgp_span = self._spans.get(span.id)
        if sgp_span is None:
            logger.warning(
                f"Span {span.id} not found in stored spans, skipping span end"
            )
            return

        sgp_span.output = span.output
        sgp_span.metadata = span.data
        sgp_span.end_time = span.end_time.isoformat()

        if self.disabled:
            return
        await self.sgp_async_client.spans.upsert_batch(
            items=[sgp_span.to_request_params()]
        )

    @override
    async def shutdown(self) -> None:
        """Flush all tracked spans in one batch, then clear local state.

        Fix: the original always dereferenced ``self.sgp_async_client``, which
        is None when disabled, raising AttributeError on shutdown.
        """
        if not self.disabled and self._spans:
            await self.sgp_async_client.spans.upsert_batch(
                items=[sgp_span.to_request_params() for sgp_span in self._spans.values()]
            )
        self._spans.clear()
diff --git a/src/agentex/lib/core/tracing/processors/tracing_processor_interface.py b/src/agentex/lib/core/tracing/processors/tracing_processor_interface.py
new file mode 100644
index 000000000..7b7cc7181
--- /dev/null
+++ b/src/agentex/lib/core/tracing/processors/tracing_processor_interface.py
@@ -0,0 +1,40 @@
+from abc import ABC, abstractmethod
+
+from agentex.lib.types.tracing import TracingProcessorConfig
+from agentex.types.span import Span
+
+
class SyncTracingProcessor(ABC):
    """Interface for synchronous tracing backends (span lifecycle hooks)."""

    @abstractmethod
    def __init__(self, config: TracingProcessorConfig):
        """Configure the processor from its backend-specific config."""
        pass

    @abstractmethod
    def on_span_start(self, span: Span) -> None:
        """Called once when a span starts."""
        pass

    @abstractmethod
    def on_span_end(self, span: Span) -> None:
        """Called once when a span ends."""
        pass

    @abstractmethod
    def shutdown(self) -> None:
        """Release resources / flush anything the backend has buffered."""
        pass
+
+
class AsyncTracingProcessor(ABC):
    """Interface for asynchronous tracing backends (span lifecycle hooks)."""

    @abstractmethod
    def __init__(self, config: TracingProcessorConfig):
        """Configure the processor from its backend-specific config."""
        pass

    @abstractmethod
    async def on_span_start(self, span: Span) -> None:
        """Called once when a span starts."""
        pass

    @abstractmethod
    async def on_span_end(self, span: Span) -> None:
        """Called once when a span ends."""
        pass

    @abstractmethod
    async def shutdown(self) -> None:
        """Release resources / flush anything the backend has buffered."""
        pass
diff --git a/src/agentex/lib/core/tracing/trace.py b/src/agentex/lib/core/tracing/trace.py
new file mode 100644
index 000000000..6547dd439
--- /dev/null
+++ b/src/agentex/lib/core/tracing/trace.py
@@ -0,0 +1,311 @@
+import asyncio
+from contextlib import asynccontextmanager, contextmanager
+from datetime import UTC, datetime
+from typing import Any, AsyncGenerator
+import uuid
+
+from pydantic import BaseModel
+
+from agentex import Agentex, AsyncAgentex
+from agentex.lib.core.tracing.processors.tracing_processor_interface import (
+ AsyncTracingProcessor,
+ SyncTracingProcessor,
+)
+from agentex.lib.utils.logging import make_logger
+from agentex.lib.utils.model_utils import recursive_model_dump
+from agentex.types.span import Span
+
+logger = make_logger(__name__)
+
+
class Trace:
    """
    Trace is a wrapper around the Agentex API for tracing.
    It provides a context manager for spans and a way to start and end spans.
    It also provides a way to get spans by ID and list all spans in a trace.
    """

    def __init__(
        self,
        processors: list[SyncTracingProcessor],
        client: Agentex,
        trace_id: str | None = None,
    ):
        """
        Initialize a new trace.

        Args:
            processors: Tracing processors notified on span start/end.
            client: Agentex client used for span lookups.
            trace_id: Optional trace ID. When falsy, ``span()`` becomes a
                no-op context manager and ``start_span`` raises.
        """
        self.processors = processors
        self.client = client
        self.trace_id = trace_id

    def start_span(
        self,
        name: str,
        parent_id: str | None = None,
        input: dict[str, Any] | list[dict[str, Any]] | BaseModel | None = None,
        data: dict[str, Any] | list[dict[str, Any]] | BaseModel | None = None,
    ) -> Span:
        """
        Start a new span and notify every processor.

        Args:
            name: Name of the span.
            parent_id: Optional parent span ID.
            input: Optional input data for the span.
            data: Optional additional data for the span.

        Returns:
            The newly created span.

        Raises:
            ValueError: If this trace has no trace_id.
        """

        if not self.trace_id:
            raise ValueError("Trace ID is required to start a span")

        # Create a span using the client's spans resource
        start_time = datetime.now(UTC)

        # NOTE(review): falsy-but-present payloads ({} or []) are coerced to
        # None here -- confirm this is intended.
        serialized_input = recursive_model_dump(input) if input else None
        serialized_data = recursive_model_dump(data) if data else None
        id = str(uuid.uuid4())

        span = Span(
            id=id,
            trace_id=self.trace_id,
            name=name,
            parent_id=parent_id,
            start_time=start_time,
            input=serialized_input,
            data=serialized_data,
        )

        for processor in self.processors:
            processor.on_span_start(span)

        return span

    def end_span(
        self,
        span: Span,
    ) -> Span:
        """
        End a span by updating it with any changes made to the span object.

        Sets end_time (if the caller did not), re-serializes the mutable
        fields, and notifies every processor.

        Args:
            span: The span object to update.

        Returns:
            The updated span.
        """
        if span.end_time is None:
            span.end_time = datetime.now(UTC)

        # NOTE(review): same falsy-to-None coercion as in start_span.
        span.input = recursive_model_dump(span.input) if span.input else None
        span.output = recursive_model_dump(span.output) if span.output else None
        span.data = recursive_model_dump(span.data) if span.data else None

        for processor in self.processors:
            processor.on_span_end(span)

        return span

    def get_span(self, span_id: str) -> Span:
        """
        Get a span by ID.

        Args:
            span_id: The ID of the span to get.

        Returns:
            The requested span.
        """
        # Query from Agentex API
        span = self.client.spans.retrieve(span_id)
        return span

    def list_spans(self) -> list[Span]:
        """
        List all spans in this trace.

        Returns:
            List of spans in this trace.
        """
        # Query from Agentex API
        # NOTE(review): annotated list[Span] but the client may return a page
        # object -- confirm pagination handling.
        spans = self.client.spans.list(trace_id=self.trace_id)
        return spans

    @contextmanager
    def span(
        self,
        name: str,
        parent_id: str | None = None,
        input: dict[str, Any] | list[dict[str, Any]] | BaseModel | None = None,
        data: dict[str, Any] | list[dict[str, Any]] | BaseModel | None = None,
    ):
        """
        Context manager for spans.
        If trace_id is falsy, acts as a no-op context manager (yields None).
        The span is always ended, even if the body raises.
        """
        if not self.trace_id:
            yield None
            return
        span = self.start_span(name, parent_id, input, data)
        try:
            yield span
        finally:
            self.end_span(span)
+
+
class AsyncTrace:
    """
    AsyncTrace is a wrapper around the Agentex API for tracing.
    It provides a context manager for spans and a way to start and end spans.
    It also provides a way to get spans by ID and list all spans in a trace.
    """

    def __init__(
        self,
        processors: list[AsyncTracingProcessor],
        client: AsyncAgentex,
        trace_id: str | None = None,
    ):
        """
        Initialize a new trace.

        Args:
            processors: Async tracing processors notified on span start/end.
            client: AsyncAgentex client used for span lookups.
            trace_id: Optional trace ID. When falsy, ``span()`` becomes a
                no-op context manager and ``start_span`` raises.
        """
        self.processors = processors
        self.client = client
        self.trace_id = trace_id

    async def start_span(
        self,
        name: str,
        parent_id: str | None = None,
        input: dict[str, Any] | list[dict[str, Any]] | BaseModel | None = None,
        data: dict[str, Any] | list[dict[str, Any]] | BaseModel | None = None,
    ) -> Span:
        """
        Start a new span and notify every processor concurrently.

        Args:
            name: Name of the span.
            parent_id: Optional parent span ID.
            input: Optional input data for the span.
            data: Optional additional data for the span.

        Returns:
            The newly created span.

        Raises:
            ValueError: If this trace has no trace_id.
        """
        if not self.trace_id:
            raise ValueError("Trace ID is required to start a span")

        # Create a span using the client's spans resource
        start_time = datetime.now(UTC)

        # NOTE(review): falsy-but-present payloads ({} or []) are coerced to
        # None here -- confirm this is intended.
        serialized_input = recursive_model_dump(input) if input else None
        serialized_data = recursive_model_dump(data) if data else None
        id = str(uuid.uuid4())

        span = Span(
            id=id,
            trace_id=self.trace_id,
            name=name,
            parent_id=parent_id,
            start_time=start_time,
            input=serialized_input,
            data=serialized_data,
        )

        if self.processors:
            # Fan out to all processors concurrently.
            await asyncio.gather(
                *[processor.on_span_start(span) for processor in self.processors]
            )

        return span

    async def end_span(
        self,
        span: Span,
    ) -> Span:
        """
        End a span by updating it with any changes made to the span object.

        Sets end_time (if the caller did not), re-serializes the mutable
        fields, and notifies every processor concurrently.

        Args:
            span: The span object to update.

        Returns:
            The updated span.
        """
        if span.end_time is None:
            span.end_time = datetime.now(UTC)

        # NOTE(review): same falsy-to-None coercion as in start_span.
        span.input = recursive_model_dump(span.input) if span.input else None
        span.output = recursive_model_dump(span.output) if span.output else None
        span.data = recursive_model_dump(span.data) if span.data else None

        if self.processors:
            await asyncio.gather(
                *[processor.on_span_end(span) for processor in self.processors]
            )

        return span

    async def get_span(self, span_id: str) -> Span:
        """
        Get a span by ID.

        Args:
            span_id: The ID of the span to get.

        Returns:
            The requested span.
        """
        # Query from Agentex API
        span = await self.client.spans.retrieve(span_id)
        return span

    async def list_spans(self) -> list[Span]:
        """
        List all spans in this trace.

        Returns:
            List of spans in this trace.
        """
        # Query from Agentex API
        # NOTE(review): annotated list[Span] but the client may return a page
        # object -- confirm pagination handling.
        spans = await self.client.spans.list(trace_id=self.trace_id)
        return spans

    @asynccontextmanager
    async def span(
        self,
        name: str,
        parent_id: str | None = None,
        input: dict[str, Any] | list[dict[str, Any]] | BaseModel | None = None,
        data: dict[str, Any] | list[dict[str, Any]] | BaseModel | None = None,
    ) -> AsyncGenerator[Span | None, None]:
        """
        Async context manager for spans.
        If trace_id is falsy, acts as a no-op context manager (yields None).
        The span is always ended, even if the body raises.

        Args:
            name: Name of the span.
            parent_id: Optional parent span ID.
            input: Optional input data for the span.
            data: Optional additional data for the span.

        Yields:
            The span object.
        """
        if not self.trace_id:
            yield None
            return
        span = await self.start_span(name, parent_id, input, data)
        try:
            yield span
        finally:
            await self.end_span(span)
diff --git a/src/agentex/lib/core/tracing/tracer.py b/src/agentex/lib/core/tracing/tracer.py
new file mode 100644
index 000000000..806d32ee3
--- /dev/null
+++ b/src/agentex/lib/core/tracing/tracer.py
@@ -0,0 +1,70 @@
+from agentex import Agentex, AsyncAgentex
+from agentex.lib.core.tracing.trace import AsyncTrace, Trace
+from agentex.lib.core.tracing.tracing_processor_manager import (
+ get_async_tracing_processors,
+ get_sync_tracing_processors,
+)
+
+
class Tracer:
    """
    Synchronous entry point for tracing in Agentex.

    Holds the API client and hands out ``Trace`` objects wired to the
    globally registered synchronous processors.
    """

    def __init__(self, client: Agentex):
        """
        Args:
            client: Agentex client used for all API communication.
        """
        self.client = client

    def trace(self, trace_id: str | None = None) -> Trace:
        """
        Build a new ``Trace`` bound to this tracer's client.

        Args:
            trace_id: Optional trace ID to attach to the trace.

        Returns:
            A freshly constructed Trace.
        """
        return Trace(
            client=self.client,
            processors=get_sync_tracing_processors(),
            trace_id=trace_id,
        )
+
+
class AsyncTracer:
    """
    Asynchronous counterpart of ``Tracer``.

    Holds the async API client and hands out ``AsyncTrace`` objects wired to
    the globally registered asynchronous processors.
    """

    def __init__(self, client: AsyncAgentex):
        """
        Args:
            client: AsyncAgentex client used for all API communication.
        """
        self.client = client

    def trace(self, trace_id: str | None = None) -> AsyncTrace:
        """
        Build a new ``AsyncTrace`` bound to this tracer's client.

        Args:
            trace_id: Optional trace ID to attach to the trace.

        Returns:
            A freshly constructed AsyncTrace.
        """
        return AsyncTrace(
            client=self.client,
            processors=get_async_tracing_processors(),
            trace_id=trace_id,
        )
diff --git a/src/agentex/lib/core/tracing/tracing_processor_manager.py b/src/agentex/lib/core/tracing/tracing_processor_manager.py
new file mode 100644
index 000000000..ffbfc6b1c
--- /dev/null
+++ b/src/agentex/lib/core/tracing/tracing_processor_manager.py
@@ -0,0 +1,62 @@
+from threading import Lock
+
+from agentex.lib.core.tracing.processors.agentex_tracing_processor import (
+ AgentexAsyncTracingProcessor,
+ AgentexSyncTracingProcessor,
+)
+from agentex.lib.core.tracing.processors.sgp_tracing_processor import (
+ SGPAsyncTracingProcessor,
+ SGPSyncTracingProcessor,
+)
+from agentex.lib.core.tracing.processors.tracing_processor_interface import (
+ AsyncTracingProcessor,
+ SyncTracingProcessor,
+)
+from agentex.lib.types.tracing import AgentexTracingProcessorConfig, TracingProcessorConfig
+
+
class TracingProcessorManager:
    """
    Registry of tracing processors, keyed by processor-config type.

    Maintains parallel sync/async processor lists, guarded by a lock so that
    configs can be registered safely from multiple threads.
    """

    def __init__(self):
        # Mapping of processor config type to processor class
        self.sync_config_registry: dict[str, type[SyncTracingProcessor]] = {
            "agentex": AgentexSyncTracingProcessor,
            "sgp": SGPSyncTracingProcessor,
        }
        self.async_config_registry: dict[str, type[AsyncTracingProcessor]] = {
            "agentex": AgentexAsyncTracingProcessor,
            "sgp": SGPAsyncTracingProcessor,
        }
        # Cache for instantiated processors
        self.sync_processors: list[SyncTracingProcessor] = []
        self.async_processors: list[AsyncTracingProcessor] = []
        # Guards the processor lists. threading.Lock is NOT reentrant, so
        # lock-holding code must only call the unlocked helper below.
        self.lock = Lock()

    def _add_processor_config_locked(self, processor_config: TracingProcessorConfig) -> None:
        """Instantiate and append processors; caller must hold ``self.lock``.

        Raises:
            KeyError: if ``processor_config.type`` is not a registered type.
        """
        sync_processor_cls = self.sync_config_registry[processor_config.type]
        async_processor_cls = self.async_config_registry[processor_config.type]
        self.sync_processors.append(sync_processor_cls(processor_config))
        self.async_processors.append(async_processor_cls(processor_config))

    def add_processor_config(self, processor_config: TracingProcessorConfig) -> None:
        """Register a single processor config (thread-safe)."""
        with self.lock:
            self._add_processor_config_locked(processor_config)

    def set_processor_configs(self, processor_configs: list[TracingProcessorConfig]):
        """Register several processor configs under one lock acquisition.

        BUG FIX: the previous implementation acquired ``self.lock`` and then
        called ``add_processor_config``, which re-acquires the same
        non-reentrant lock — deadlocking on the first config.
        """
        with self.lock:
            for processor_config in processor_configs:
                self._add_processor_config_locked(processor_config)

    def get_sync_processors(self) -> list[SyncTracingProcessor]:
        """Return the registered synchronous processors."""
        return self.sync_processors

    def get_async_processors(self) -> list[AsyncTracingProcessor]:
        """Return the registered asynchronous processors."""
        return self.async_processors
+
+
# Global instance
GLOBAL_TRACING_PROCESSOR_MANAGER = TracingProcessorManager()

# Convenience aliases bound to the global manager, so callers can register
# and query tracing processors without importing the singleton directly.
add_tracing_processor_config = GLOBAL_TRACING_PROCESSOR_MANAGER.add_processor_config
set_tracing_processor_configs = GLOBAL_TRACING_PROCESSOR_MANAGER.set_processor_configs
get_sync_tracing_processors = GLOBAL_TRACING_PROCESSOR_MANAGER.get_sync_processors
get_async_tracing_processors = GLOBAL_TRACING_PROCESSOR_MANAGER.get_async_processors

# Add the Agentex tracing processor by default
add_tracing_processor_config(AgentexTracingProcessorConfig())
diff --git a/src/agentex/lib/environment_variables.py b/src/agentex/lib/environment_variables.py
new file mode 100644
index 000000000..2030fa483
--- /dev/null
+++ b/src/agentex/lib/environment_variables.py
@@ -0,0 +1,87 @@
+from __future__ import annotations
+
+import os
+from enum import Enum
+from pathlib import Path
+
+from dotenv import load_dotenv
+
+from agentex.lib.utils.model_utils import BaseModel
+
# NOTE(review): for src/agentex/lib/environment_variables.py, parents[2]
# resolves to the `src/` directory, not the repository root — confirm intent.
PROJECT_ROOT = Path(__file__).resolve().parents[2]
+
+
class EnvVarKeys(str, Enum):
    """Names of the environment variables the SDK reads."""

    ENVIRONMENT = "ENVIRONMENT"
    TEMPORAL_ADDRESS = "TEMPORAL_ADDRESS"
    REDIS_URL = "REDIS_URL"
    AGENTEX_BASE_URL = "AGENTEX_BASE_URL"
    # Agent Identifiers
    AGENT_NAME = "AGENT_NAME"
    AGENT_DESCRIPTION = "AGENT_DESCRIPTION"
    AGENT_ID = "AGENT_ID"
    # ACP Configuration
    ACP_URL = "ACP_URL"
    ACP_PORT = "ACP_PORT"
    ACP_TYPE = "ACP_TYPE"
    # Workflow Configuration
    WORKFLOW_NAME = "WORKFLOW_NAME"
    WORKFLOW_TASK_QUEUE = "WORKFLOW_TASK_QUEUE"
+
+
class Environment(str, Enum):
    """Deployment environment names carried in the ENVIRONMENT variable."""

    DEV = "development"
    STAGING = "staging"
    PROD = "production"
+
+
# Process-wide cache for EnvironmentVariables.refresh(); populated once.
refreshed_environment_variables = None
+
+
class EnvironmentVariables(BaseModel):
    """
    Typed snapshot of the environment variables an agent process reads.

    AGENT_NAME and ACP_URL have no defaults and are therefore required;
    the remaining settings default to local-development values.
    """

    ENVIRONMENT: str = Environment.DEV
    TEMPORAL_ADDRESS: str | None = "localhost:7233"
    REDIS_URL: str | None = None
    AGENTEX_BASE_URL: str | None = "http://localhost:5003"
    # Agent Identifiers
    AGENT_NAME: str
    AGENT_DESCRIPTION: str | None = None
    AGENT_ID: str | None = None
    ACP_TYPE: str | None = "agentic"
    # ACP Configuration
    ACP_URL: str
    ACP_PORT: int = 8000
    # Workflow Configuration
    WORKFLOW_TASK_QUEUE: str | None = None
    WORKFLOW_NAME: str | None = None

    @classmethod
    def refresh(cls) -> EnvironmentVariables | None:
        """
        Build (once) and return the process-wide EnvironmentVariables.

        In the development environment this first loads a global ``.env``
        and then a ``.env.local`` (which takes precedence) via python-dotenv.
        The result is cached in module-level
        ``refreshed_environment_variables`` and reused on later calls.
        """
        global refreshed_environment_variables
        if refreshed_environment_variables is not None:
            return refreshed_environment_variables

        if os.environ.get(EnvVarKeys.ENVIRONMENT) == Environment.DEV:
            # Load global .env file first
            global_env_path = PROJECT_ROOT / ".env"
            if global_env_path.exists():
                print(f"Loading global environment variables FROM: {global_env_path}")
                load_dotenv(dotenv_path=global_env_path, override=False)

            # Load local project .env.local file (takes precedence)
            # NOTE(review): resolves against the PARENT of the current working
            # directory — confirm that is intentional.
            local_env_path = Path.cwd().parent / ".env.local"
            if local_env_path.exists():
                print(f"Loading local environment variables FROM: {local_env_path}")
                load_dotenv(dotenv_path=local_env_path, override=True)

        # Build kwargs only from variables that are actually set; pydantic
        # then applies the declared defaults for every omitted field.
        kwargs = {}
        for key in EnvVarKeys:
            env_value = os.environ.get(key.value)
            if env_value is not None:
                kwargs[key.value] = env_value

        environment_variables = EnvironmentVariables(**kwargs)
        refreshed_environment_variables = environment_variables
        return refreshed_environment_variables
diff --git a/src/agentex/lib/py.typed b/src/agentex/lib/py.typed
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/agentex/lib/sdk/__init__.py b/src/agentex/lib/sdk/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/agentex/lib/sdk/config/__init__.py b/src/agentex/lib/sdk/config/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/agentex/lib/sdk/config/agent_config.py b/src/agentex/lib/sdk/config/agent_config.py
new file mode 100644
index 000000000..164ed12fd
--- /dev/null
+++ b/src/agentex/lib/sdk/config/agent_config.py
@@ -0,0 +1,61 @@
+from typing import Any, Literal
+
+from pydantic import Field
+
+from agentex.lib.types.agent_configs import TemporalConfig, TemporalWorkflowConfig
+from agentex.lib.types.credentials import CredentialMapping
+from agentex.lib.utils.logging import make_logger
+from agentex.lib.utils.model_utils import BaseModel
+
+logger = make_logger(__name__)
+
+
class AgentConfig(BaseModel):
    """Declarative configuration for a single agent."""

    name: str = Field(
        ...,
        description="The name of the agent.",
        pattern=r"^[a-z0-9-]+$",
    )
    acp_type: Literal["sync", "agentic"] = Field(..., description="The type of agent.")
    description: str = Field(..., description="The description of the agent.")
    env: dict[str, str] | None = Field(
        default=None, description="Environment variables to set directly in the agent deployment"
    )
    credentials: list[CredentialMapping | dict[str, Any]] | None = Field(
        default=None,
        description="List of credential mappings to mount to the agent deployment. Supports both legacy format and new typed credentials.",
    )
    temporal: TemporalConfig | None = Field(
        default=None, description="Temporal workflow configuration for this agent"
    )

    def is_temporal_agent(self) -> bool:
        """Whether this agent is configured (and enabled) to use Temporal."""
        return bool(self.temporal and self.temporal.enabled)

    def get_temporal_workflow_config(self) -> TemporalWorkflowConfig | None:
        """Return the primary temporal workflow config, or None.

        Prefers the first entry of the new ``workflows`` list, falling back
        to the legacy single ``workflow`` field (backward compatibility).
        """
        temporal = self.temporal
        if not (temporal and temporal.enabled):
            return None
        if temporal.workflows:
            return temporal.workflows[0]
        return temporal.workflow if temporal.workflow else None

    def get_temporal_workflows(self) -> list[TemporalWorkflowConfig]:
        """Return every configured temporal workflow (possibly empty).

        Prefers the new ``workflows`` list; falls back to wrapping the
        legacy single ``workflow`` field.
        """
        temporal = self.temporal
        if not (temporal and temporal.enabled):
            return []
        if temporal.workflows:
            return temporal.workflows
        if temporal.workflow:
            return [temporal.workflow]
        return []
diff --git a/src/agentex/lib/sdk/config/agent_manifest.py b/src/agentex/lib/sdk/config/agent_manifest.py
new file mode 100644
index 000000000..77733c782
--- /dev/null
+++ b/src/agentex/lib/sdk/config/agent_manifest.py
@@ -0,0 +1,219 @@
+from __future__ import annotations
+
+import io
+import shutil
+import subprocess
+import tarfile
+import tempfile
+import time
+from collections.abc import Iterator
+from contextlib import contextmanager
+from pathlib import Path
+from typing import IO, Any
+
+from pydantic import Field
+
+from agentex.lib.sdk.config.agent_config import AgentConfig
+from agentex.lib.sdk.config.build_config import BuildConfig
+from agentex.lib.sdk.config.deployment_config import DeploymentConfig
+from agentex.lib.sdk.config.local_development_config import LocalDevelopmentConfig
+from agentex.lib.utils.logging import make_logger
+from agentex.lib.utils.model_utils import BaseModel
+
+logger = make_logger(__name__)
+
+
class AgentManifest(BaseModel):
    """
    Manifest describing how to build and deploy an agent.
    """

    build: BuildConfig
    agent: AgentConfig
    local_development: LocalDevelopmentConfig | None = Field(
        default=None, description="Configuration for local development"
    )
    deployment: DeploymentConfig | None = Field(
        default=None, description="Deployment configuration for the agent"
    )

    def context_manager(self, build_context_root: Path) -> BuildContextManager:
        """
        Create a BuildContextManager for this manifest, rooted at
        ``build_context_root``.
        """
        return BuildContextManager(
            build_context_root=build_context_root,
            agent_manifest=self,
        )
+
+
class BuildContextManager:
    """
    A gateway used to manage the build context for a docker image.

    Used as a context manager: on enter it assembles a temporary directory
    containing the Dockerfile, optional .dockerignore, and the manifest's
    include paths; on exit the temporary directory is removed.
    """

    def __init__(self, agent_manifest: AgentManifest, build_context_root: Path):
        self.agent_manifest = agent_manifest
        self.build_context_root = build_context_root
        # Backing temp dir; created in __enter__, cleaned up in __exit__.
        self._temp_dir: tempfile.TemporaryDirectory | None = None

        # Root of the assembled build context (inside the temp dir).
        self.path: Path | None = None
        # Destination filenames inside the context root.
        self.dockerfile_path = "Dockerfile"
        self.dockerignore_path = ".dockerignore"
        # Relative paths of directories copied into the context so far.
        self.directory_paths: list[Path] = []

    def __enter__(self) -> BuildContextManager:
        """Assemble the build context in a fresh temporary directory."""
        self._temp_dir = tempfile.TemporaryDirectory()
        self.path = Path(self._temp_dir.name)

        dockerfile_path = (
            self.build_context_root / self.agent_manifest.build.context.dockerfile
        )
        self.add_dockerfile(root_path=self.path, dockerfile_path=dockerfile_path)

        ignore_patterns = []
        if self.agent_manifest.build.context.dockerignore:
            dockerignore_path = (
                self.build_context_root / self.agent_manifest.build.context.dockerignore
            )
            self.add_dockerignore(
                root_path=self.path, dockerignore_path=dockerignore_path
            )
            # Reuse the dockerignore globs to skip files while copying below.
            ignore_patterns = _extract_dockerignore_patterns(dockerignore_path)

        for directory in self.agent_manifest.build.context.include_paths:
            directory_path = self.build_context_root / directory
            self.add_directory(
                root_path=self.path,
                directory_path=directory_path,
                context_root=self.build_context_root,
                ignore_patterns=ignore_patterns,
            )

        return self

    def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
        """Delete the temporary build context, if one was created."""
        if self._temp_dir:
            self._temp_dir.cleanup()

    def add_dockerfile(self, root_path: Path, dockerfile_path: Path) -> None:
        """
        Copies a dockerfile to the temporary context directory root
        """
        shutil.copy2(dockerfile_path, root_path / self.dockerfile_path)

    def add_dockerignore(self, root_path: Path, dockerignore_path: Path) -> None:
        """
        Copies a dockerignore to the temporary context directory root
        """
        shutil.copy2(str(dockerignore_path), root_path / self.dockerignore_path)

    def add_directory(
        self,
        root_path: Path,
        directory_path: Path,
        context_root: Path,
        ignore_patterns: list[str] | None = None,
    ) -> None:
        """
        Copies a directory to the temporary context directory root while maintaining its relative
        path to the context root.

        Logs progress per file and warns when the copy is taking long, so
        users can tune their .dockerignore.
        """
        directory_copy_start_time = time.time()
        last_log_time = directory_copy_start_time

        # Wraps shutil.copy2 to log per-file progress and slow-copy warnings.
        def copy_function_with_progress(src, dst):
            nonlocal directory_copy_start_time
            nonlocal last_log_time
            logger.info(f"Adding {src} to build context...")
            shutil.copy2(src, dst)
            current_time = time.time()
            time_elapsed = current_time - directory_copy_start_time

            # Throttle progress logs to at most once per second.
            if time_elapsed > 1 and current_time - last_log_time >= 1:
                logger.info(
                    f"Time elapsed copying ({directory_path}): {time_elapsed} "
                    f"seconds"
                )
                last_log_time = current_time
                if time_elapsed > 5:
                    logger.warning(
                        f"This may take a while... "
                        f"Consider adding {directory_path} or {src} to your .dockerignore file."
                    )

        directory_path_relative_to_root = directory_path.relative_to(context_root)
        # NOTE(review): shutil.ignore_patterns matches against basenames, so
        # this absolute-path pattern (meant to avoid copying the temp context
        # into itself?) likely never matches — confirm intent.
        all_ignore_patterns = [f"{root_path}*"]
        if ignore_patterns:
            all_ignore_patterns += ignore_patterns
        shutil.copytree(
            src=directory_path,
            dst=root_path / directory_path_relative_to_root,
            ignore=shutil.ignore_patterns(*all_ignore_patterns),
            dirs_exist_ok=True,
            copy_function=copy_function_with_progress,
        )
        self.directory_paths.append(directory_path_relative_to_root)

    @contextmanager
    def zip_stream(self, root_path: Path | None = None) -> Iterator[IO[bytes]]:
        """
        Creates a tar archive of the temporary context directory
        and returns a stream of the archive.

        Raises:
            ValueError: if ``root_path`` is not provided.
        """
        if not root_path:
            raise ValueError("root_path must be provided")
        context = str(root_path.absolute())
        folders_to_include = "."
        tar_command = ["tar", "-C", context, "-cf", "-"]
        # NOTE(review): list.extend over a string appends each character;
        # this only works because "." is a single character.
        tar_command.extend(folders_to_include)

        logger.info(f"Creating archive: {' '.join(tar_command)}")

        # Popen.__exit__ closes the pipe and waits for tar when the caller
        # leaves this context.
        with subprocess.Popen(
            tar_command,
            stdout=subprocess.PIPE,
            stderr=subprocess.DEVNULL,
        ) as proc:
            assert proc.stdout is not None
            try:
                yield proc.stdout
            finally:
                pass

    @staticmethod
    @contextmanager
    def zipped(root_path: Path | None = None) -> Iterator[IO[bytes]]:
        """
        Creates a tar.gz archive of the temporary context directory
        and returns a stream of the archive.

        The whole archive is built in memory (BytesIO), unlike zip_stream,
        which streams from a tar subprocess.

        Raises:
            ValueError: if ``root_path`` is not provided.
        """
        if not root_path:
            raise ValueError("root_path must be provided")

        tar_buffer = io.BytesIO()

        with tarfile.open(fileobj=tar_buffer, mode="w:gz") as tar_file:
            for path in Path(root_path).rglob(
                "*"
            ):  # Recursively add files to the tar.gz
                if path.is_file():  # Ensure that we're only adding files
                    tar_file.add(path, arcname=path.relative_to(root_path))

        tar_buffer.seek(0)  # Reset the buffer position to the beginning
        yield tar_buffer
+
+def _extract_dockerignore_patterns(dockerignore_path: Path) -> list[str]:
+ """
+ Extracts glob patterns to ignore from the dockerignore into a list of patterns
+ :param dockerignore_path: Path to the dockerignore to extract patterns from
+ :return: List of glob patterns to ignore
+ :rtype: List[str]
+ """
+ ignore_patterns = []
+ with open(dockerignore_path) as file:
+ for line in file:
+ ignored_filepath = line.split("#", 1)[0].strip()
+ if ignored_filepath:
+ ignore_patterns.append(ignored_filepath)
+ return ignore_patterns
diff --git a/src/agentex/lib/sdk/config/build_config.py b/src/agentex/lib/sdk/config/build_config.py
new file mode 100644
index 000000000..5b8d994bb
--- /dev/null
+++ b/src/agentex/lib/sdk/config/build_config.py
@@ -0,0 +1,35 @@
+from pydantic import Field
+
+from agentex.lib.utils.model_utils import BaseModel
+
+
class BuildContext(BaseModel):
    """
    Represents the context in which the Docker image should be built.

    All other paths are interpreted relative to ``root``, which is itself
    relative to the build config file.
    """

    root: str = Field(
        ...,
        description="The root directory of the build context. Should be specified relative to the location of the "
        "build config file.",
    )
    include_paths: list[str] = Field(
        default_factory=list,
        description="The paths to include in the build context. Should be specified relative to the root directory.",
    )
    dockerfile: str = Field(
        ...,
        description="The path to the Dockerfile. Should be specified relative to the root directory.",
    )
    dockerignore: str | None = Field(
        None,
        description="The path to the .dockerignore file. Should be specified relative to the root directory.",
    )
+
+
class BuildConfig(BaseModel):
    """
    Represents a configuration for building the action as a Docker image.
    """

    # The docker build context: root, include paths, Dockerfile, dockerignore.
    context: BuildContext
diff --git a/src/agentex/lib/sdk/config/deployment_config.py b/src/agentex/lib/sdk/config/deployment_config.py
new file mode 100644
index 000000000..7972dbdae
--- /dev/null
+++ b/src/agentex/lib/sdk/config/deployment_config.py
@@ -0,0 +1,117 @@
+from typing import Any
+
+from pydantic import Field
+
+from agentex.lib.utils.model_utils import BaseModel
+
+
class ImageConfig(BaseModel):
    """Configuration for container images"""

    repository: str = Field(..., description="Container image repository URL")
    # Defaults to "latest" when the manifest omits an explicit tag.
    tag: str = Field(default="latest", description="Container image tag")
+
+
class ImagePullSecretConfig(BaseModel):
    """Configuration for image pull secrets"""

    # NOTE(review): presumably the name of an existing Kubernetes secret —
    # confirm against the deployment chart.
    name: str = Field(..., description="Name of the image pull secret")
+
+
class ResourceRequirements(BaseModel):
    """Resource requirements for containers (Kubernetes-style quantities)."""

    cpu: str = Field(
        default="500m", description="CPU request/limit (e.g., '500m', '1')"
    )
    memory: str = Field(
        default="1Gi", description="Memory request/limit (e.g., '1Gi', '512Mi')"
    )
+
+
class ResourceConfig(BaseModel):
    """Resource configuration for containers: separate requests and limits."""

    requests: ResourceRequirements = Field(
        default_factory=ResourceRequirements, description="Resource requests"
    )
    limits: ResourceRequirements = Field(
        default_factory=ResourceRequirements, description="Resource limits"
    )
+
+
class GlobalDeploymentConfig(BaseModel):
    """Global deployment configuration that applies to all clusters"""

    agent: dict[str, str] = Field(
        default_factory=dict, description="Agent metadata (name, description)"
    )
    # camelCase to mirror the helm chart values key.
    replicaCount: int = Field(default=1, description="Number of replicas to deploy")
    resources: ResourceConfig = Field(
        default_factory=ResourceConfig, description="Resource requirements"
    )
+
+
class DeploymentConfig(BaseModel):
    """Main deployment configuration in the manifest"""

    image: ImageConfig = Field(..., description="Container image configuration")
    imagePullSecrets: list[ImagePullSecretConfig] | None = Field(
        default=None, description="Image pull secrets to use for the deployment"
    )
    # Aliased because "global" is a reserved keyword in Python.
    global_config: GlobalDeploymentConfig = Field(
        default_factory=GlobalDeploymentConfig,
        description="Global deployment settings",
        alias="global",
    )

    class Config:
        # NOTE(review): 'validate_by_name' is a pydantic>=2.11 config key
        # (earlier v2: 'populate_by_name') — confirm against the pinned
        # pydantic version.
        validate_by_name = True
+
+
class ClusterConfig(BaseModel):
    """Per-cluster deployment overrides; unset fields fall back to globals."""

    image: ImageConfig | None = Field(
        default=None, description="Cluster-specific image overrides"
    )
    replicaCount: int | None = Field(
        default=None, description="Cluster-specific replica count"
    )
    resources: ResourceConfig | None = Field(
        default=None, description="Cluster-specific resource overrides"
    )
    env: list[dict[str, str]] | None = Field(
        default=None, description="Additional environment variables for this cluster"
    )
    # Allow additional arbitrary overrides for advanced users
    additional_overrides: dict[str, Any] | None = Field(
        default=None, description="Additional helm chart value overrides"
    )
+
+
class InjectedImagePullSecretValues(BaseModel):
    """Values for image pull secrets (registry credentials)."""

    registry: str = Field(..., description="Registry of the image pull secret")
    username: str = Field(..., description="Username of the image pull secret")
    password: str = Field(..., description="Password of the image pull secret")
    email: str | None = Field(
        default=None, description="Email of the image pull secret"
    )
+
+
class InjectedSecretsValues(BaseModel):
    """Values for injected secrets"""

    # Defined as a dictionary because the names need to be unique
    credentials: dict[str, Any] = Field(
        default_factory=dict, description="Secrets to inject into the deployment"
    )
    imagePullSecrets: dict[str, InjectedImagePullSecretValues] = Field(
        default_factory=dict,
        description="Image pull secrets to inject into the deployment",
    )

    class Config:
        # NOTE(review): see DeploymentConfig — confirm this config key against
        # the pinned pydantic version.
        validate_by_name = True
diff --git a/src/agentex/lib/sdk/config/local_development_config.py b/src/agentex/lib/sdk/config/local_development_config.py
new file mode 100644
index 000000000..72532c6f5
--- /dev/null
+++ b/src/agentex/lib/sdk/config/local_development_config.py
@@ -0,0 +1,56 @@
+from pathlib import Path
+
+from pydantic import Field, validator
+
+from agentex.lib.utils.model_utils import BaseModel
+
+
class LocalAgentConfig(BaseModel):
    """Configuration for local agent development"""

    # Validated to a real TCP port (1-65535).
    port: int = Field(
        ...,
        description="The port where the agent's ACP server is running locally",
        gt=0,
        lt=65536,
    )
    host_address: str = Field(
        default="host.docker.internal",
        description="The host address where the agent's ACP server can be reached (e.g., host.docker.internal for Docker, localhost for direct)",
    )
+
+
class LocalPathsConfig(BaseModel):
    """Configuration for local file paths"""

    acp: str = Field(
        default="project/acp.py",
        description="Path to the ACP server file. Can be relative to manifest directory or absolute.",
    )
    worker: str | None = Field(
        default=None,
        description="Path to the temporal worker file. Can be relative to manifest directory or absolute. (only for temporal agents)",
    )

    # NOTE(review): pydantic v1-style @validator, deprecated under pydantic v2
    # (field_validator) — confirm against the pinned pydantic version.
    @validator("acp", "worker")
    def validate_path_format(cls, v):
        """Validate that the path is a reasonable format"""
        if v is None:
            return v

        # Convert to Path to validate format
        try:
            Path(v)
        except Exception as e:
            raise ValueError(f"Invalid path format: {v}") from e

        return v
+
+
class LocalDevelopmentConfig(BaseModel):
    """Configuration for local development environment"""

    agent: LocalAgentConfig = Field(..., description="Local agent configuration")
    paths: LocalPathsConfig | None = Field(
        default=None, description="File paths for local development"
    )
diff --git a/src/agentex/lib/sdk/config/project_config.py b/src/agentex/lib/sdk/config/project_config.py
new file mode 100644
index 000000000..9b91ce2c4
--- /dev/null
+++ b/src/agentex/lib/sdk/config/project_config.py
@@ -0,0 +1,103 @@
+import os
+import re
+from pathlib import Path
+from typing import Any, TypeVar
+
+import yaml
+from jinja2 import BaseLoader, Environment, StrictUndefined, TemplateError
+
+T = TypeVar("T")
+
+
class ConfigResolutionError(Exception):
    """Raised when a project config file cannot be rendered or parsed."""

    def __init__(self, message: str) -> None:
        super().__init__(message)
        # These failures are caller/config mistakes, surfaced as HTTP 400.
        self.status_code = 400
+
+
+def _preprocess_template(template_str: str) -> str:
+ # Replace $env. and $variables. with unique internal names
+ return template_str.replace("{{ $env.", "{{ __special_env__.").replace(
+ "{{ $variables.", "{{ __special_variables__."
+ )
+
+
+def _extract_variables_section(raw_config_str: str) -> str:
+ # Use regex to extract the variables: ... block (YAML top-level)
+ match = re.search(
+ r"(^variables:.*?)(^config:|\Z)", raw_config_str, re.DOTALL | re.MULTILINE
+ )
+ if not match:
+ return ""
+ return match.group(1)
+
+
def ProjectConfigLoader(
    config_path: str, model: type[T] | None = None, env_path: str | None = None
) -> dict[str, Any] | T:
    """
    Load a YAML project config with two-pass Jinja rendering.

    Pass 1 renders only the ``variables:`` block against the environment
    ($env, process env merged with an optional .env file); pass 2 renders the
    whole document against both $env and the resolved $variables. The
    ``config:`` section is then returned, raw or validated into ``model``.

    Args:
        config_path: Path to the YAML config file.
        model: Optional model class to validate the config section into.
        env_path: Optional .env file path; defaults to ``<config dir>/.env``.

    Returns:
        The parsed ``config`` section, as a dict or as a ``model`` instance.

    Raises:
        ConfigResolutionError: on template errors, YAML errors, or a missing
            ``config`` section.
    """
    config_file = Path(config_path)
    env_file = Path(env_path) if env_path else config_file.parent / ".env"
    env = _load_env(env_file)
    raw_config_str = _preprocess_template(_load_file_as_str(config_file))

    # First pass: render only the variables section against $env.
    variables_section_str = _extract_variables_section(raw_config_str)
    env_context = {"__special_env__": env, "__special_variables__": {}}
    try:
        env_only_template = Environment(
            loader=BaseLoader(),
            undefined=StrictUndefined,
            keep_trailing_newline=True,
            autoescape=False,
        ).from_string(variables_section_str)
        rendered_variables_yaml = env_only_template.render(**env_context)
        # BUG FIX: yaml.safe_load returns None for an empty/absent variables
        # section; the previous bare `.get` then raised AttributeError, making
        # every config without a `variables:` block fail to load.
        variables_dict = (yaml.safe_load(rendered_variables_yaml) or {}).get("variables", {})
    except Exception as e:
        raise ConfigResolutionError(f"Error rendering variables with $env: {e}") from e

    # Second pass: render the whole config with $env plus resolved $variables.
    full_context = {"__special_env__": env, "__special_variables__": variables_dict}
    rendered_config_str = _jinja_render(raw_config_str, full_context)
    try:
        rendered_config = yaml.safe_load(rendered_config_str)
    except Exception as e:
        raise ConfigResolutionError(f"Error loading rendered YAML: {e}") from e
    # Guard the type as well: an empty/non-mapping document would otherwise
    # raise TypeError on the membership test.
    if not isinstance(rendered_config, dict) or "config" not in rendered_config:
        raise ConfigResolutionError("Missing 'config' section in config file.")
    config_section = rendered_config["config"]
    if model is not None:
        return model(**config_section)
    return config_section
+
+
+def _load_env(env_path: Path) -> dict[str, str]:
+ env = dict(os.environ)
+ if env_path.exists():
+ with open(env_path) as f:
+ for line in f:
+ line = line.strip()
+ if not line or line.startswith("#"):
+ continue
+ if "=" in line:
+ k, v = line.split("=", 1)
+ env[k.strip()] = v.strip()
+ return env
+
+
+def _load_file_as_str(path: Path) -> str:
+ with open(path) as f:
+ return f.read()
+
+
def _jinja_render(template_str: str, context: dict) -> str:
    """Render ``template_str`` with Jinja under StrictUndefined semantics.

    Raises:
        ConfigResolutionError: wrapping any Jinja TemplateError.
    """
    try:
        jinja_env = Environment(
            loader=BaseLoader(),
            undefined=StrictUndefined,
            keep_trailing_newline=True,
            autoescape=False,
        )
        return jinja_env.from_string(template_str).render(**context)
    except TemplateError as e:
        raise ConfigResolutionError(f"Jinja template error: {e}") from e
diff --git a/src/agentex/lib/sdk/fastacp/__init__.py b/src/agentex/lib/sdk/fastacp/__init__.py
new file mode 100644
index 000000000..b69863798
--- /dev/null
+++ b/src/agentex/lib/sdk/fastacp/__init__.py
@@ -0,0 +1,3 @@
+from agentex.lib.sdk.fastacp.fastacp import FastACP
+
+__all__ = ["FastACP"]
diff --git a/src/agentex/lib/sdk/fastacp/base/base_acp_server.py b/src/agentex/lib/sdk/fastacp/base/base_acp_server.py
new file mode 100644
index 000000000..746a1af0b
--- /dev/null
+++ b/src/agentex/lib/sdk/fastacp/base/base_acp_server.py
@@ -0,0 +1,406 @@
+import asyncio
+import inspect
+from collections.abc import AsyncGenerator, Awaitable, Callable
+from contextlib import asynccontextmanager
+from typing import Any
+
+import httpx
+import uvicorn
+from fastapi import FastAPI, Request
+from fastapi.responses import StreamingResponse
+from pydantic import TypeAdapter, ValidationError
+
+# from agentex.lib.sdk.fastacp.types import BaseACPConfig
+from agentex.lib.environment_variables import EnvironmentVariables
+from agentex.lib.types.acp import (
+ PARAMS_MODEL_BY_METHOD,
+ RPC_SYNC_METHODS,
+ CancelTaskParams,
+ CreateTaskParams,
+ RPCMethod,
+ SendEventParams,
+ SendMessageParams,
+)
+from agentex.lib.types.json_rpc import JSONRPCError, JSONRPCRequest, JSONRPCResponse
+from agentex.lib.types.task_message_updates import StreamTaskMessageFull, TaskMessageUpdate
+from agentex.types.task_message_content import TaskMessageContent
+from agentex.lib.utils.logging import make_logger
+from agentex.lib.utils.model_utils import BaseModel
+
+logger = make_logger(__name__)
+
+# Create a TypeAdapter for TaskMessageUpdate validation
+task_message_update_adapter = TypeAdapter(TaskMessageUpdate)
+
+
+class BaseACPServer(FastAPI):
+    """
+    AsyncAgentACP provides RPC-style hooks for agent events and commands asynchronously.
+    All methods follow JSON-RPC 2.0 format.
+
+    Available methods (registered via the decorators defined below):
+    - task/create  → Create a task
+    - event/send   → Send an event to a task
+    - task/cancel  → Cancel a task
+    - message/send → Send a message to a task (sync, optionally streaming)
+    """
+
+    def __init__(self):
+        super().__init__(lifespan=self.get_lifespan_function())
+
+        # HTTP surface: a health probe plus a single JSON-RPC 2.0 entry point.
+        self.get("/healthz")(self._healthz)
+        self.post("/api")(self._handle_jsonrpc)
+
+        # Method handlers, keyed by RPCMethod; populated via the decorators below.
+        self._handlers: dict[RPCMethod, Callable] = {}
+
+    @classmethod
+    def create(cls):
+        """Create and initialize BaseACPServer instance"""
+        instance = cls()
+        instance._setup_handlers()
+        return instance
+
+    def _setup_handlers(self):
+        """Set up default handlers - override in subclasses"""
+        # Base class has no default handlers
+        pass
+
+    def get_lifespan_function(self):
+        # Builds the FastAPI lifespan context manager. On startup the agent
+        # registers itself with the Agentex server when AGENTEX_BASE_URL is set.
+        @asynccontextmanager
+        async def lifespan_context(app: FastAPI):
+            env_vars = EnvironmentVariables.refresh()
+            if env_vars.AGENTEX_BASE_URL:
+                await self._register_agent(env_vars)
+            else:
+                logger.warning("AGENTEX_BASE_URL not set, skipping agent registration")
+
+            yield
+
+        return lifespan_context
+
+    async def _healthz(self):
+        """Health check endpoint"""
+        return {"status": "healthy"}
+
+    def _wrap_handler(self, fn: Callable[..., Awaitable[Any]]):
+        """Wraps handler functions to provide JSON-RPC 2.0 response format"""
+
+        async def wrapper(*args, **kwargs) -> Any:
+            return await fn(*args, **kwargs)
+
+        return wrapper
+
+    async def _handle_jsonrpc(self, request: Request):
+        """Main JSON-RPC endpoint handler.
+
+        Routes the request to the registered handler for its method. Sync
+        methods (RPC_SYNC_METHODS) are awaited inline — a streaming
+        MESSAGE_SEND result becomes an NDJSON StreamingResponse — while all
+        other methods are scheduled in the background and acknowledged
+        immediately with {"status": "processing"}.
+        """
+        rpc_request = None
+        try:
+            data = await request.json()
+            rpc_request = JSONRPCRequest(**data)
+
+            # Check if method is valid first
+            try:
+                method = RPCMethod(rpc_request.method)
+            except ValueError:
+                logger.error(f"Method {rpc_request.method} was invalid")
+                return JSONRPCResponse(
+                    id=rpc_request.id,
+                    error=JSONRPCError(
+                        code=-32601, message=f"Method {rpc_request.method} not found"
+                    ),
+                )
+
+            if method not in self._handlers or self._handlers[method] is None:
+                logger.error(f"Method {method} not found on existing ACP server")
+                return JSONRPCResponse(
+                    id=rpc_request.id,
+                    error=JSONRPCError(
+                        code=-32601, message=f"Method {method} not found"
+                    ),
+                )
+
+            # Parse params into appropriate model based on method
+            params_model = PARAMS_MODEL_BY_METHOD[method]
+            params = params_model.model_validate(rpc_request.params)
+
+            if method in RPC_SYNC_METHODS:
+                handler = self._handlers[method]
+                result = await handler(params)
+
+                if rpc_request.id is None:
+                    # JSON-RPC notifications (no id) receive no response payload
+                    return None
+                else:
+                    # Handle streaming vs non-streaming for MESSAGE_SEND
+                    if method == RPCMethod.MESSAGE_SEND and isinstance(
+                        result, AsyncGenerator
+                    ):
+                        return await self._handle_streaming_response(
+                            rpc_request.id, result
+                        )
+                    else:
+                        if isinstance(result, BaseModel):
+                            result = result.model_dump()
+                        return JSONRPCResponse(id=rpc_request.id, result=result)
+            else:
+                # If this is a notification (no request ID), process in background and return immediately
+                if rpc_request.id is None:
+                    asyncio.create_task(self._process_notification(method, params))
+                    return JSONRPCResponse(id=None)
+
+                # For regular requests, start processing in background but return immediately
+                asyncio.create_task(
+                    self._process_request(rpc_request.id, method, params)
+                )
+
+                # Return immediate acknowledgment
+                return JSONRPCResponse(
+                    id=rpc_request.id, result={"status": "processing"}
+                )
+
+        except Exception as e:
+            logger.error(f"Error handling JSON-RPC request: {e}", exc_info=True)
+            request_id = None
+            if rpc_request is not None:
+                request_id = rpc_request.id
+            # NOTE(review): error is passed as a dict here but as a JSONRPCError
+            # model in the branches above — confirm JSONRPCResponse accepts both.
+            return JSONRPCResponse(
+                id=request_id,
+                error=JSONRPCError(code=-32603, message=str(e)).model_dump(),
+            )
+
+    async def _handle_streaming_response(
+        self, request_id: int | str, async_gen: AsyncGenerator
+    ):
+        """Handle streaming response by formatting TaskMessageUpdate objects as JSON-RPC stream"""
+
+        async def generate_json_rpc_stream():
+            try:
+                async for chunk in async_gen:
+                    # Each chunk should be a TaskMessageUpdate object
+                    # Validate using Pydantic's TypeAdapter to ensure it's a proper TaskMessageUpdate
+                    try:
+                        # This will validate that chunk conforms to the TaskMessageUpdate union type
+                        validated_chunk = task_message_update_adapter.validate_python(
+                            chunk
+                        )
+                        # Use mode="json" to properly serialize datetime objects
+                        chunk_data = validated_chunk.model_dump(mode="json")
+                    except ValidationError as e:
+                        raise TypeError(
+                            f"Streaming chunks must be TaskMessageUpdate objects. Validation error: {e}"
+                        ) from e
+                    except Exception as e:
+                        raise TypeError(
+                            f"Streaming chunks must be TaskMessageUpdate objects, got {type(chunk)}: {e}"
+                        ) from e
+
+                    # Wrap in JSON-RPC response format
+                    response = JSONRPCResponse(id=request_id, result=chunk_data)
+                    # Use model_dump_json() which handles datetime serialization automatically
+                    yield f"{response.model_dump_json()}\n"
+
+            except Exception as e:
+                logger.error(f"Error in streaming response: {e}", exc_info=True)
+                # Mid-stream failures are reported as a final JSON-RPC error line.
+                error_response = JSONRPCResponse(
+                    id=request_id,
+                    error=JSONRPCError(code=-32603, message=str(e)).model_dump(),
+                )
+                yield f"{error_response.model_dump_json()}\n"
+
+        return StreamingResponse(
+            generate_json_rpc_stream(),
+            media_type="application/x-ndjson", # Newline Delimited JSON
+            headers={
+                "Cache-Control": "no-cache",
+                "Connection": "keep-alive",
+                "X-Accel-Buffering": "no", # Disable nginx buffering
+            },
+        )
+
+    async def _process_notification(self, method: RPCMethod, params: Any):
+        """Process a notification (request with no ID) in the background"""
+        try:
+            handler = self._handlers[method]
+            await handler(params)
+        except Exception as e:
+            logger.error(f"Error processing notification {method}: {e}", exc_info=True)
+
+    async def _process_request(
+        self, request_id: int | str, method: RPCMethod, params: Any
+    ):
+        """Process a request in the background"""
+        try:
+            handler = self._handlers[method]
+            await handler(params)
+            # Note: In a real implementation, you might want to store the result somewhere
+            # or notify the client through a different mechanism
+            logger.info(
+                f"Successfully processed request {request_id} for method {method}"
+            )
+        except Exception as e:
+            logger.error(
+                f"Error processing request {request_id} for method {method}: {e}",
+                exc_info=True,
+            )
+
+    """
+    Define all possible decorators to be overriden and implemented by each ACP implementation
+    Then the users can override the default handlers by implementing their own handlers
+
+    ACP Type: Agentic
+    Decorators:
+    - on_task_create
+    - on_task_event_send
+    - on_task_cancel
+
+    ACP Type: Sync
+    Decorators:
+    - on_message_send
+    """
+
+    # Type: Agentic
+    def on_task_create(self, fn: Callable[[CreateTaskParams], Awaitable[Any]]):
+        """Register a handler for the task/create method"""
+        wrapped = self._wrap_handler(fn)
+        self._handlers[RPCMethod.TASK_CREATE] = wrapped
+        return fn
+
+    # Type: Agentic
+    def on_task_event_send(self, fn: Callable[[SendEventParams], Awaitable[Any]]):
+        """Register a handler for the event/send method"""
+
+        async def wrapped_handler(params: SendEventParams):
+            # # # Send message to client first most of the time
+            # ## But, sometimes you may want to process the message first
+            # ## and then send a message to the client
+            # await agentex.interactions.send_messages_to_client(
+            #     task_id=params.task_id,
+            #     messages=[params.message]
+            # )
+            return await fn(params)
+
+        wrapped = self._wrap_handler(wrapped_handler)
+        self._handlers[RPCMethod.EVENT_SEND] = wrapped
+        return fn
+
+    # Type: Agentic
+    def on_task_cancel(self, fn: Callable[[CancelTaskParams], Awaitable[Any]]):
+        """Register a handler for the task/cancel method"""
+        wrapped = self._wrap_handler(fn)
+        self._handlers[RPCMethod.TASK_CANCEL] = wrapped
+        return fn
+
+    # Type: Sync
+    def on_message_send(
+        self,
+        fn: Callable[
+            [SendMessageParams],
+            Awaitable[list[TaskMessageContent] | AsyncGenerator[TaskMessageUpdate, None]],
+        ],
+    ):
+        """Handle message/send method - supports both single and streaming responses
+
+        For non-streaming: return a TaskMessageContent (or a list of them)
+        For streaming: return an AsyncGenerator that yields TaskMessageUpdate objects
+        """
+
+        async def message_send_wrapper(params: SendMessageParams):
+            """Special wrapper for message_send that handles both regular async functions and async generators"""
+            # Check if the function is an async generator function
+
+            # Regardless of whether the agent developer implemented an async generator or not, we always turn the function into an async generator and yield events back to the Agentex server so there is only one way for it to process the response. Then, based on the client's desire to stream or not, the Agentex server will either yield back the async generator objects directly (if streaming) or aggregate the content into a list of TaskMessageContents to dispatch to the client. This gives the Agentex server the flexibility to handle both cases itself.
+
+            if inspect.isasyncgenfunction(fn):
+                # The client wants streaming, an async generator already streams the content, so just return it
+                return fn(params)
+            else:
+                # The function is not an async generator, so we turn it into one and yield each TaskMessageContent as a StreamTaskMessageFull which will be streamed to the client by the Agentex server.
+                task_message_content_response = await fn(params)
+                if isinstance(task_message_content_response, list):
+                    task_message_content_list = task_message_content_response
+                else:
+                    task_message_content_list = [task_message_content_response]
+
+                async def async_generator(task_message_content_list: list[TaskMessageContent]):
+                    for i, task_message_content in enumerate(task_message_content_list):
+                        yield StreamTaskMessageFull(index=i, content=task_message_content)
+
+                return async_generator(task_message_content_list)
+
+        self._handlers[RPCMethod.MESSAGE_SEND] = message_send_wrapper
+        return fn
+
+    """
+    End of Decorators
+    """
+
+    """
+    ACP Server Lifecycle Methods
+    """
+
+    def run(self, host: str = "0.0.0.0", port: int = 8000, **kwargs):
+        """Start the Uvicorn server for async handlers."""
+        uvicorn.run(self, host=host, port=port, **kwargs)
+
+    async def _register_agent(self, env_vars: EnvironmentVariables):
+        """Register this agent with the Agentex server.
+
+        Retries up to max_retries times with linearly increasing backoff and
+        raises the last captured error if every attempt fails.
+        """
+        # Build the agent's own URL
+        full_acp_url = f"{env_vars.ACP_URL.rstrip('/')}:{env_vars.ACP_PORT}"
+
+        description = (
+            env_vars.AGENT_DESCRIPTION
+            or f"Generic description for agent: {env_vars.AGENT_NAME}"
+        )
+        # Prepare registration data
+        registration_data = {
+            "name": env_vars.AGENT_NAME,
+            "description": description,
+            "acp_url": full_acp_url,
+            "acp_type": env_vars.ACP_TYPE,
+        }
+
+        if env_vars.AGENT_ID:
+            registration_data["agent_id"] = env_vars.AGENT_ID
+
+        # Make the registration request
+        registration_url = f"{env_vars.AGENTEX_BASE_URL.rstrip('/')}/agents/register"
+        # Retry logic with configurable attempts and delay
+        max_retries = 3
+        base_delay = 5 # seconds
+        last_exception = None
+
+        attempt = 0
+        while attempt < max_retries:
+            try:
+                async with httpx.AsyncClient() as client:
+                    response = await client.post(
+                        registration_url, json=registration_data, timeout=30.0
+                    )
+                    if response.status_code == 200:
+                        logger.info(
+                            f"Successfully registered agent '{env_vars.AGENT_NAME}' with Agentex server with acp_url: {full_acp_url}. Registration data: {registration_data}"
+                        )
+                        return # Success, exit the retry loop
+                    else:
+                        error_msg = f"Failed to register agent. Status: {response.status_code}, Response: {response.text}"
+                        logger.error(error_msg)
+                        last_exception = Exception(
+                            f"Failed to startup agent: {response.text}"
+                        )
+
+            except Exception as e:
+                logger.error(
+                    f"Exception during agent registration attempt {attempt + 1}: {e}"
+                )
+                last_exception = e
+            attempt += 1
+            if attempt < max_retries:
+                delay = (attempt) * base_delay # 5s after attempt 1, 10s after attempt 2
+                logger.info(
+                    f"Retrying in {delay} seconds... (attempt {attempt}/{max_retries})"
+                )
+                await asyncio.sleep(delay)
+
+        # If we get here, all retries failed
+        raise last_exception or Exception(
+            f"Failed to register agent after {max_retries} attempts"
+        )
diff --git a/src/agentex/lib/sdk/fastacp/fastacp.py b/src/agentex/lib/sdk/fastacp/fastacp.py
new file mode 100644
index 000000000..493e9b491
--- /dev/null
+++ b/src/agentex/lib/sdk/fastacp/fastacp.py
@@ -0,0 +1,74 @@
+from typing import Literal
+from agentex.lib.sdk.fastacp.base.base_acp_server import BaseACPServer
+from agentex.lib.sdk.fastacp.impl.agentic_base_acp import AgenticBaseACP
+from agentex.lib.sdk.fastacp.impl.sync_acp import SyncACP
+from agentex.lib.sdk.fastacp.impl.temporal_acp import TemporalACP
+from agentex.lib.types.fastacp import (
+ AgenticACPConfig,
+ BaseACPConfig,
+ SyncACPConfig,
+)
+
+# Registry of agentic ACP sub-type -> implementation class.
+# Add new mappings between ACP sub-types and implementations here.
+AGENTIC_ACP_IMPLEMENTATIONS: dict[Literal["temporal", "base"], type[BaseACPServer]] = {
+    "temporal": TemporalACP,
+    "base": AgenticBaseACP,
+}
+
+
+class FastACP:
+ """Factory for creating FastACP instances
+
+ Supports two main ACP types:
+ - "sync": Simple synchronous ACP implementation
+ - "agentic": Advanced ACP with sub-types "base" or "temporal" (requires config)
+ """
+
+ @staticmethod
+ # Note: the config is optional and not used right now but is there to be extended in the future
+ def create_sync_acp(config: SyncACPConfig | None = None, **kwargs) -> SyncACP:
+ """Create a SyncACP instance"""
+ return SyncACP.create(**kwargs)
+
+ @staticmethod
+ def create_agentic_acp(config: AgenticACPConfig, **kwargs) -> BaseACPServer:
+ """Create an agentic ACP instance (base or temporal)
+
+ Args:
+ config: AgenticACPConfig with type="base" or type="temporal"
+ **kwargs: Additional configuration parameters
+ """
+ # Get implementation class
+ implementation_class = AGENTIC_ACP_IMPLEMENTATIONS[config.type]
+ # Handle temporal-specific configuration
+ if config.type == "temporal":
+ # Extract temporal_address from config if it's a TemporalACPConfig
+ temporal_config = kwargs.copy()
+ if hasattr(config, "temporal_address"):
+ temporal_config["temporal_address"] = config.temporal_address
+ return implementation_class.create(**temporal_config)
+ else:
+ return implementation_class.create(**kwargs)
+
+ @staticmethod
+ def create(
+ acp_type: Literal["sync", "agentic"], config: BaseACPConfig | None = None, **kwargs
+ ) -> BaseACPServer | SyncACP | AgenticBaseACP | TemporalACP:
+ """Main factory method to create any ACP type
+
+ Args:
+ acp_type: Type of ACP to create ("sync" or "agentic")
+ config: Configuration object. Required for agentic type.
+ **kwargs: Additional configuration parameters
+ """
+
+ if acp_type == "sync":
+ sync_config = config if isinstance(config, SyncACPConfig) else None
+ return FastACP.create_sync_acp(sync_config, **kwargs)
+ elif acp_type == "agentic":
+ if config is None:
+ config = AgenticACPConfig(type="base")
+ if not isinstance(config, AgenticACPConfig):
+ raise ValueError("AgenticACPConfig is required for agentic ACP type")
+ return FastACP.create_agentic_acp(config, **kwargs)
diff --git a/src/agentex/lib/sdk/fastacp/impl/agentic_base_acp.py b/src/agentex/lib/sdk/fastacp/impl/agentic_base_acp.py
new file mode 100644
index 000000000..52311ccc0
--- /dev/null
+++ b/src/agentex/lib/sdk/fastacp/impl/agentic_base_acp.py
@@ -0,0 +1,72 @@
+from typing import Any
+from typing_extensions import override
+from agentex import AsyncAgentex
+from agentex.lib.sdk.fastacp.base.base_acp_server import BaseACPServer
+from agentex.lib.types.acp import (
+ CancelTaskParams,
+ CreateTaskParams,
+ SendEventParams,
+)
+from agentex.lib.utils.logging import make_logger
+
+logger = make_logger(__name__)
+
+
+class AgenticBaseACP(BaseACPServer):
+    """
+    AgenticBaseACP implementation - the "base" agentic ACP that provides basic
+    functionality without any special async orchestration like Temporal.
+
+    This implementation provides simple processing of tasks
+    and is suitable for basic agent implementations.
+    """
+
+    def __init__(self):
+        super().__init__()
+        # Handlers are registered first; they are closures that read
+        # self._agentex_client at call time, so assigning the client after
+        # _setup_handlers() is safe.
+        self._setup_handlers()
+        self._agentex_client = AsyncAgentex()
+
+    @classmethod
+    @override
+    def create(cls, **kwargs: Any) -> "AgenticBaseACP":
+        """Create and initialize AgenticBaseACP instance
+
+        Args:
+            **kwargs: Configuration parameters (currently unused)
+
+        Returns:
+            Initialized AgenticBaseACP instance
+        """
+        logger.info("Initializing AgenticBaseACP instance")
+        instance = cls()
+        logger.info("AgenticBaseACP instance initialized with default handlers")
+        return instance
+
+    @override
+    def _setup_handlers(self):
+        """Set up default handlers for agentic task operations"""
+
+        @self.on_task_create
+        async def handle_create_task(params: CreateTaskParams) -> None: # type: ignore[unused-function]
+            """Default create task handler - logs the task"""
+            logger.info(f"AgenticBaseACP creating task {params.task.id}")
+
+        @self.on_task_event_send
+        async def handle_event_send(params: SendEventParams) -> None: # type: ignore[unused-function]
+            """Default event handler - logs the event"""
+            logger.info(
+                f"AgenticBaseACP received event for task {params.task.id}: {params.event.id},"
+                f"content: {params.event.content}"
+            )
+            # TODO: Implement event handling logic here
+
+            # Commit the event cursor: record this event as the last one
+            # processed for the task's tracker.
+            await self._agentex_client.tracker.update(
+                tracker_id=params.task.id,
+                last_processed_event_id=params.event.id,
+            )
+
+        @self.on_task_cancel
+        async def handle_cancel(params: CancelTaskParams) -> None: # type: ignore[unused-function]
+            """Default cancel handler - logs the cancellation"""
+            logger.info(f"AgenticBaseACP canceling task {params.task.id}")
diff --git a/src/agentex/lib/sdk/fastacp/impl/sync_acp.py b/src/agentex/lib/sdk/fastacp/impl/sync_acp.py
new file mode 100644
index 000000000..71187f4da
--- /dev/null
+++ b/src/agentex/lib/sdk/fastacp/impl/sync_acp.py
@@ -0,0 +1,109 @@
+from collections.abc import AsyncGenerator
+from typing import Any, override
+
+from agentex.lib.sdk.fastacp.base.base_acp_server import BaseACPServer
+from agentex.lib.types.acp import SendMessageParams
+from agentex.lib.types.task_message_updates import (
+ StreamTaskMessageDelta,
+ StreamTaskMessageFull,
+ TaskMessageUpdate,
+ TextDelta,
+)
+from agentex.types.task_message_content import TaskMessageContent, TextContent
+from agentex.lib.utils.logging import make_logger
+
+logger = make_logger(__name__)
+
+
+class SyncACP(BaseACPServer):
+ """
+ SyncACP provides synchronous request-response style communication.
+ Handlers execute and return responses immediately.
+
+ The SyncACP automatically creates input and output messages, so handlers
+ don't need to manually create TaskMessage objects via the Agentex API. All that needs
+ to be done is return the output message via TaskMessageContent objects.
+
+ Usage:
+ acp = SyncACP()
+
+ @acp.on_message_send
+ async def handle_message(params: SendMessageParams) -> TaskMessageContent:
+ # Process message and return response
+ pass
+
+ acp.run()
+ """
+
+ def __init__(self):
+ super().__init__()
+ self._setup_handlers()
+
+ @classmethod
+ @override
+ def create(cls, **kwargs: Any) -> "SyncACP":
+ """Create and initialize SyncACP instance
+
+ Args:
+ **kwargs: Configuration parameters (unused in sync implementation)
+
+ Returns:
+ Initialized SyncACP instance
+ """
+ logger.info("Creating SyncACP instance")
+ instance = cls()
+ logger.info("SyncACP instance created with default handlers")
+ return instance
+
+ @override
+ def _setup_handlers(self):
+ """Set up default handlers for sync operations"""
+
+ @self.on_message_send
+ async def handle_message_send( # type: ignore[unused-function]
+ params: SendMessageParams
+ ) -> TaskMessageContent | AsyncGenerator[TaskMessageUpdate, None]:
+ """Default message handler with TaskMessageUpdate streaming support
+
+ For streaming, the SyncACP server automatically creates the input and output
+ messages, so we just return TaskMessageUpdate objects with parent_task_message=None
+ """
+ logger.info(
+ f"SyncACP received message for task {params.task.id}: {params.content}"
+ )
+
+ if params.stream:
+ # Return streaming response
+ async def stream_response():
+ # Example: Stream 3 chunks
+ full_message = ""
+ for i in range(3):
+ data = f"Streaming chunk {i+1}: Processing your request...\n"
+ full_message += data
+ yield StreamTaskMessageDelta(
+ type="delta",
+ index=0,
+ delta=TextDelta(
+ text_delta=f"Streaming chunk {i+1}: Processing your request...\n"
+ ),
+ )
+
+ # Final response
+ yield StreamTaskMessageFull(
+ type="full",
+ index=0,
+ content=TextContent(
+ author="agent",
+ content=full_message,
+ format="markdown",
+ ),
+ )
+
+ return stream_response()
+ else:
+ # Return single response for non-streaming
+ return TextContent(
+ author="agent",
+ content=f"Processed message for task {params.task.id}",
+ format="markdown",
+ )
diff --git a/src/agentex/lib/sdk/fastacp/impl/temporal_acp.py b/src/agentex/lib/sdk/fastacp/impl/temporal_acp.py
new file mode 100644
index 000000000..f2ec2a732
--- /dev/null
+++ b/src/agentex/lib/sdk/fastacp/impl/temporal_acp.py
@@ -0,0 +1,97 @@
+from contextlib import asynccontextmanager
+from typing import AsyncGenerator, Callable
+
+from fastapi import FastAPI
+
+from agentex.lib.core.clients.temporal.temporal_client import TemporalClient
+from agentex.lib.core.temporal.services.temporal_task_service import TemporalTaskService
+from agentex.lib.environment_variables import EnvironmentVariables
+from agentex.lib.sdk.fastacp.base.base_acp_server import BaseACPServer
+from agentex.lib.types.acp import (
+ CancelTaskParams,
+ CreateTaskParams,
+ SendEventParams,
+)
+from agentex.lib.utils.logging import make_logger
+
+logger = make_logger(__name__)
+
+
+class TemporalACP(BaseACPServer):
+ """
+ Temporal-specific implementation of AsyncAgentACP.
+ Uses TaskService to forward operations to temporal workflows.
+ """
+
+ def __init__(
+ self, temporal_address: str, temporal_task_service: TemporalTaskService | None = None
+ ):
+ super().__init__()
+ self._temporal_task_service = temporal_task_service
+ self._temporal_address = temporal_address
+
+ @classmethod
+ def create(cls, temporal_address: str) -> "TemporalACP":
+ logger.info("Initializing TemporalACP instance")
+
+ # Create instance without temporal client initially
+ temporal_acp = cls(temporal_address=temporal_address)
+ temporal_acp._setup_handlers()
+ logger.info("TemporalACP instance initialized now")
+ return temporal_acp
+
+ # This is to override the lifespan function of the base
+ def get_lifespan_function(self) -> Callable[[FastAPI], AsyncGenerator[None, None]]:
+ @asynccontextmanager
+ async def lifespan(app: FastAPI):
+ # Create temporal client during startup
+ if self._temporal_address is None:
+ raise ValueError("Temporal address is not set")
+
+ if self._temporal_task_service is None:
+ env_vars = EnvironmentVariables.refresh()
+ temporal_client = await TemporalClient.create(
+ temporal_address=self._temporal_address
+ )
+ self._temporal_task_service = TemporalTaskService(
+ temporal_client=temporal_client,
+ env_vars=env_vars,
+ )
+
+ # Call parent lifespan for agent registration
+ async with super().get_lifespan_function()(app):
+ yield
+
+ return lifespan
+
+ def _setup_handlers(self):
+ """Set up the handlers for temporal workflow operations"""
+
+ @self.on_task_create
+ async def handle_task_create(params: CreateTaskParams) -> None:
+ """Default create task handler - logs the task"""
+ logger.info(f"TemporalACP received task create rpc call for task {params.task.id}")
+ await self._temporal_task_service.submit_task(agent=params.agent, task=params.task)
+
+ @self.on_task_event_send
+ async def handle_event_send(params: SendEventParams) -> None:
+ """Forward messages to running workflows via TaskService"""
+ try:
+ await self._temporal_task_service.send_event(
+ agent=params.agent,
+ task=params.task,
+ event=params.event,
+ )
+
+ except Exception as e:
+ logger.error(f"Failed to send message: {e}")
+ raise
+
+ @self.on_task_cancel
+ async def handle_cancel(params: CancelTaskParams) -> None:
+ """Cancel running workflows via TaskService"""
+ try:
+ await self._temporal_task_service.cancel(task_id=params.task.id)
+ except Exception as e:
+ logger.error(f"Failed to cancel task: {e}")
+ raise
diff --git a/src/agentex/lib/sdk/fastacp/tests/README.md b/src/agentex/lib/sdk/fastacp/tests/README.md
new file mode 100644
index 000000000..4f5c6f9b1
--- /dev/null
+++ b/src/agentex/lib/sdk/fastacp/tests/README.md
@@ -0,0 +1,297 @@
+# BaseACPServer Test Suite
+
+This directory contains comprehensive tests for the `BaseACPServer` and its implementations (`SyncACP`, `AgenticBaseACP`, and `TemporalACP`).
+
+## Test Structure
+
+The test suite is organized into several categories:
+
+### 1. Core Unit Tests (`test_base_acp_server.py`)
+- **TestBaseACPServerInitialization**: Server initialization and setup
+- **TestHealthCheckEndpoint**: Health check endpoint functionality
+- **TestJSONRPCEndpointCore**: Basic JSON-RPC endpoint functionality
+- **TestHandlerRegistration**: Handler registration and management
+- **TestBackgroundProcessing**: Background task processing
+- **TestErrorHandling**: Basic error handling scenarios
+
+### 2. JSON-RPC Endpoint Tests (`test_json_rpc_endpoints.py`)
+- **TestJSONRPCMethodHandling**: Method routing and execution
+- **TestJSONRPCParameterValidation**: Parameter parsing and validation
+- **TestJSONRPCResponseFormat**: Response formatting compliance
+- **TestJSONRPCErrorCodes**: JSON-RPC 2.0 error code compliance
+- **TestJSONRPCConcurrency**: Concurrent request handling
+
+### 3. Integration Tests (`test_server_integration.py`)
+- **TestServerLifecycle**: Server startup, running, and shutdown
+- **TestHTTPClientIntegration**: Real HTTP client interactions
+- **TestHandlerExecutionIntegration**: Handler execution in server environment
+- **TestServerPerformance**: Performance characteristics
+
+### 4. Implementation Tests (`test_implementations.py`)
+- **TestSyncACP**: SyncACP-specific functionality
+- **TestAgenticBaseACP**: AgenticBaseACP-specific functionality
+- **TestTemporalACP**: TemporalACP-specific functionality
+- **TestImplementationComparison**: Differences between implementations
+- **TestImplementationErrorHandling**: Implementation-specific error handling
+
+### 5. Error Handling Tests (`test_error_handling.py`)
+- **TestMalformedRequestHandling**: Invalid and malformed requests
+- **TestHandlerErrorHandling**: Handler-level error scenarios
+- **TestServerErrorHandling**: Server-level error handling
+- **TestEdgeCases**: Edge cases and boundary conditions
+
+## Running Tests
+
+### Prerequisites
+
+Install test dependencies:
+```bash
+pip install pytest pytest-asyncio httpx pytest-cov pytest-xdist
+```
+
+### Basic Usage
+
+Run all tests:
+```bash
+python run_tests.py
+```
+
+Run specific test categories:
+```bash
+python run_tests.py --category unit
+python run_tests.py --category integration
+python run_tests.py --category implementations
+python run_tests.py --category error
+```
+
+### Advanced Options
+
+Run with coverage:
+```bash
+python run_tests.py --coverage
+```
+
+Run in parallel:
+```bash
+python run_tests.py --parallel 4
+```
+
+Run with increased verbosity:
+```bash
+python run_tests.py -vv
+```
+
+Stop on first failure:
+```bash
+python run_tests.py --failfast
+```
+
+Run only failed tests from last run:
+```bash
+python run_tests.py --lf
+```
+
+### Quick Test Options
+
+For development, use these quick test commands:
+
+```bash
+# Quick smoke tests
+python run_tests.py smoke
+
+# Quick development tests
+python run_tests.py quick
+
+# Performance tests only
+python run_tests.py perf
+```
+
+### Direct pytest Usage
+
+You can also run tests directly with pytest:
+
+```bash
+# Run all tests
+pytest
+
+# Run specific test file
+pytest test_base_acp_server.py
+
+# Run specific test class
+pytest test_base_acp_server.py::TestBaseACPServerInitialization
+
+# Run specific test method
+pytest test_base_acp_server.py::TestBaseACPServerInitialization::test_base_acp_server_init
+
+# Run with markers
+pytest -m "not slow"
+```
+
+## Test Configuration
+
+### Fixtures (`conftest.py`)
+
+The test suite uses several fixtures:
+
+- **`free_port`**: Provides a free port for testing
+- **`sample_task`**, **`sample_message`**: Sample data objects
+- **`base_acp_server`**, **`sync_acp`**, **`agentic_base_acp`**, **`mock_temporal_acp`**: Server instances
+- **`test_server_runner`**: Manages server lifecycle for integration tests
+- **`jsonrpc_client_factory`**: Creates JSON-RPC test clients
+- **`mock_env_vars`**: Mocked environment variables
+
+### Test Utilities
+
+- **`TestServerRunner`**: Manages server startup/shutdown for integration tests
+- **`JSONRPCTestClient`**: Simplified JSON-RPC client for testing
+- **`find_free_port()`**: Utility to find available ports
+
+## Test Categories Explained
+
+### Unit Tests
+Focus on individual components in isolation:
+- Server initialization
+- Handler registration
+- Basic endpoint functionality
+- Parameter validation
+
+### Integration Tests
+Test components working together:
+- Full server lifecycle
+- Real HTTP requests
+- Handler execution in server context
+- Performance characteristics
+
+### Implementation Tests
+Test specific ACP implementations:
+- SyncACP behavior
+- AgenticBaseACP send_event functionality
+- TemporalACP workflow integration
+- Implementation differences
+
+### Error Handling Tests
+Comprehensive error scenarios:
+- Malformed JSON-RPC requests
+- Handler exceptions
+- Server error recovery
+- Edge cases and boundary conditions
+
+## Writing New Tests
+
+### Test Naming Convention
+- Test files: `test_*.py`
+- Test classes: `Test*`
+- Test methods: `test_*`
+
+### Async Test Example
+```python
+@pytest.mark.asyncio
+async def test_my_async_functionality(self, base_acp_server):
+ # Your async test code here
+ result = await some_async_operation()
+ assert result is not None
+```
+
+### Integration Test Example
+```python
+@pytest.mark.asyncio
+async def test_server_integration(self, base_acp_server, free_port, test_server_runner):
+ runner = test_server_runner(base_acp_server, free_port)
+ await runner.start()
+
+ try:
+ # Test server functionality
+ async with httpx.AsyncClient() as client:
+ response = await client.get(f"http://127.0.0.1:{free_port}/healthz")
+ assert response.status_code == 200
+ finally:
+ await runner.stop()
+```
+
+### Handler Test Example
+```python
+@pytest.mark.asyncio
+async def test_custom_handler(self, base_acp_server):
+ handler_called = False
+
+ @base_acp_server.on_task_event_send
+ async def test_handler(params: SendEventParams):
+ nonlocal handler_called
+ handler_called = True
+ return {"handled": True}
+
+ # Test handler execution
+ params = SendEventParams(...)
+ result = await base_acp_server._handlers[RPCMethod.EVENT_SEND](params)
+
+ assert handler_called is True
+ assert result["handled"] is True
+```
+
+## Continuous Integration
+
+The test suite is designed to work well in CI environments:
+
+- Tests are isolated and don't interfere with each other
+- Ports are dynamically allocated to avoid conflicts
+- Background tasks are properly cleaned up
+- Timeouts are reasonable for CI environments
+
+### CI Configuration Example
+
+```yaml
+# .github/workflows/test.yml
+name: Tests
+on: [push, pull_request]
+jobs:
+ test:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+ - uses: actions/setup-python@v2
+ with:
+          python-version: '3.12'
+ - run: pip install -r requirements.txt
+ - run: pip install pytest pytest-asyncio httpx pytest-cov
+      - run: cd src/agentex/lib/sdk/fastacp/tests && python run_tests.py --coverage
+```
+
+## Troubleshooting
+
+### Common Issues
+
+1. **Port conflicts**: Tests use dynamic port allocation, but if you see port conflicts, try running tests sequentially:
+ ```bash
+ python run_tests.py --parallel 1
+ ```
+
+2. **Async test failures**: Make sure all async tests are marked with `@pytest.mark.asyncio`
+
+3. **Handler not found errors**: Ensure handlers are properly registered before testing
+
+4. **Timeout issues**: Some tests have built-in delays for background processing. If tests are flaky, increase sleep times in test code.
+
+### Debug Mode
+
+Run tests with maximum verbosity and no capture:
+```bash
+pytest -vvv -s --tb=long
+```
+
+### Memory Issues
+
+If you encounter memory issues with large tests:
+```bash
+python run_tests.py --markers "not memory_intensive"
+```
+
+## Contributing
+
+When adding new tests:
+
+1. Follow the existing test structure and naming conventions
+2. Add appropriate docstrings explaining what the test does
+3. Use fixtures for common setup
+4. Clean up resources properly (especially in integration tests)
+5. Add tests to the appropriate category in `run_tests.py`
+6. Update this README if adding new test categories or significant functionality
\ No newline at end of file
diff --git a/src/agentex/lib/sdk/fastacp/tests/conftest.py b/src/agentex/lib/sdk/fastacp/tests/conftest.py
new file mode 100644
index 000000000..59b9e1013
--- /dev/null
+++ b/src/agentex/lib/sdk/fastacp/tests/conftest.py
@@ -0,0 +1,307 @@
+import asyncio
+import socket
+import time
+from typing import Any
+from unittest.mock import AsyncMock, patch
+
+import httpx
+import pytest
+import pytest_asyncio
+import uvicorn
+
+from agentex.lib.sdk.fastacp.base.base_acp_server import BaseACPServer
+from agentex.lib.sdk.fastacp.impl.agentic_base_acp import AgenticBaseACP
+from agentex.lib.sdk.fastacp.impl.sync_acp import SyncACP
+from agentex.lib.sdk.fastacp.impl.temporal_acp import TemporalACP
+from agentex.lib.types.acp import (
+ CancelTaskParams,
+ CreateTaskParams,
+ SendMessageParams,
+)
+from agentex.lib.types.json_rpc import JSONRPCRequest
+from agentex.types.agent import Agent
+from agentex.types.task_message import TaskMessageContent
+from agentex.types.task_message_content import TextContent
+from agentex.types.task import Task
+
+# Configure pytest-asyncio
+pytest_plugins = ("pytest_asyncio",)
+
+
+def find_free_port() -> int:
+ """Find a free port for testing"""
+ with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
+ s.bind(("", 0))
+ s.listen(1)
+ port = s.getsockname()[1]
+ return port
+
+
+@pytest.fixture
+def free_port() -> int:
+ """Fixture that provides a free port for testing"""
+ return find_free_port()
+
+
+@pytest.fixture
+def sample_task() -> Task:
+ """Fixture that provides a sample Task object"""
+    return Task(
+        id="test-task-123", agent_id="test-agent-456", status="running"
+    )
+
+
+@pytest.fixture
+def sample_message_content() -> TaskMessageContent:
+ """Fixture that provides a sample TaskMessage object"""
+ return TextContent(
+ type="text",
+ author="user",
+ content="Hello, this is a test message",
+ )
+
+
+@pytest.fixture
+def sample_send_message_params(
+ sample_task: Task, sample_message_content: TaskMessageContent
+) -> SendMessageParams:
+ """Fixture that provides sample SendMessageParams"""
+ return SendMessageParams(
+ agent=Agent(
+ id="test-agent-456",
+ name="test-agent",
+ description="test-agent",
+ acp_type="sync",
+ ),
+ task=sample_task,
+ content=sample_message_content,
+ stream=False,
+ )
+
+
+@pytest.fixture
+def sample_cancel_task_params() -> CancelTaskParams:
+ """Fixture that provides sample CancelTaskParams"""
+ return CancelTaskParams(
+ agent=Agent(id="test-agent-456", name="test-agent", description="test-agent", acp_type="sync"),
+ task=Task(id="test-task-123", agent_id="test-agent-456", status="running"),
+ )
+
+
+@pytest.fixture
+def sample_create_task_params(sample_task: Task) -> CreateTaskParams:
+ """Fixture that provides sample CreateTaskParams"""
+ return CreateTaskParams(
+ agent=Agent(id="test-agent-456", name="test-agent", description="test-agent", acp_type="sync"),
+ task=sample_task,
+ params={},
+ )
+
+
+class TestServerRunner:
+ """Utility class for running test servers"""
+
+ def __init__(self, app: BaseACPServer, port: int):
+ self.app = app
+ self.port = port
+ self.server = None
+ self.server_task = None
+
+ async def start(self):
+ """Start the server in a background task"""
+ config = uvicorn.Config(
+ app=self.app,
+ host="127.0.0.1",
+ port=self.port,
+ log_level="error", # Reduce noise in tests
+ )
+ self.server = uvicorn.Server(config)
+ self.server_task = asyncio.create_task(self.server.serve())
+
+ # Wait for server to be ready
+ await self._wait_for_server()
+
+ async def stop(self):
+ """Stop the server"""
+ if self.server:
+ self.server.should_exit = True
+ if self.server_task:
+ try:
+ await asyncio.wait_for(self.server_task, timeout=5.0)
+ except TimeoutError:
+ self.server_task.cancel()
+ try:
+ await self.server_task
+ except asyncio.CancelledError:
+ pass
+
+ async def _wait_for_server(self, timeout: float = 10.0):
+ """Wait for server to be ready to accept connections"""
+ start_time = time.time()
+ while time.time() - start_time < timeout:
+ try:
+ async with httpx.AsyncClient() as client:
+ response = await client.get(f"http://127.0.0.1:{self.port}/healthz")
+ if response.status_code == 200:
+ return
+ except (httpx.ConnectError, httpx.ConnectTimeout):
+ await asyncio.sleep(0.1)
+ raise TimeoutError(f"Server did not start within {timeout} seconds")
+
+
+@pytest_asyncio.fixture
+async def test_server_runner():
+ """Fixture that provides a TestServerRunner factory"""
+ runners = []
+
+ def create_runner(app: BaseACPServer, port: int) -> TestServerRunner:
+ runner = TestServerRunner(app, port)
+ runners.append(runner)
+ return runner
+
+ yield create_runner
+
+ # Cleanup all runners
+ for runner in runners:
+ await runner.stop()
+
+
+@pytest.fixture
+def base_acp_server():
+ """Fixture that provides a BaseACPServer instance for sync tests"""
+ with patch.dict(
+ "os.environ", {"AGENTEX_BASE_URL": ""}
+ ): # Disable agent registration
+ server = BaseACPServer()
+ return server
+
+
+@pytest_asyncio.fixture
+async def async_base_acp_server():
+ """Fixture that provides a BaseACPServer instance for async tests"""
+ with patch.dict(
+ "os.environ", {"AGENTEX_BASE_URL": ""}
+ ): # Disable agent registration
+ server = BaseACPServer.create()
+ return server
+
+
+@pytest.fixture
+def sync_acp_server():
+ """Fixture that provides a SyncACP instance for sync tests"""
+ with patch.dict(
+ "os.environ", {"AGENTEX_BASE_URL": ""}
+ ): # Disable agent registration
+ server = SyncACP()
+ return server
+
+
+@pytest_asyncio.fixture
+async def async_sync_acp_server():
+ """Fixture that provides a SyncACP instance for async tests"""
+ with patch.dict(
+ "os.environ", {"AGENTEX_BASE_URL": ""}
+ ): # Disable agent registration
+ server = await SyncACP.create()
+ return server
+
+
+@pytest.fixture
+def agentic_base_acp_server():
+ """Fixture that provides an AgenticBaseACP instance for sync tests"""
+ with patch.dict(
+ "os.environ", {"AGENTEX_BASE_URL": ""}
+ ): # Disable agent registration
+ server = AgenticBaseACP()
+ return server
+
+
+@pytest_asyncio.fixture
+async def async_agentic_base_acp_server():
+ """Fixture that provides an AgenticBaseACP instance for async tests"""
+ with patch.dict(
+ "os.environ", {"AGENTEX_BASE_URL": ""}
+ ): # Disable agent registration
+ server = await AgenticBaseACP.create()
+ return server
+
+
+@pytest_asyncio.fixture
+async def mock_temporal_acp_server():
+ """Fixture that provides a mocked TemporalACP instance"""
+ with patch.dict(
+ "os.environ", {"AGENTEX_BASE_URL": ""}
+ ): # Disable agent registration
+        with patch(
+            "agentex.lib.sdk.fastacp.impl.temporal_acp.TemporalClient"
+        ) as mock_temporal_client:
+            with patch(
+                "agentex.lib.sdk.fastacp.impl.temporal_acp.AsyncAgentexClient"
+            ) as mock_agentex_client:
+ # Mock the temporal client creation
+ mock_temporal_client.create.return_value = AsyncMock()
+ mock_agentex_client.return_value = AsyncMock()
+
+ server = await TemporalACP.create(temporal_address="localhost:7233")
+ return server
+
+
+class JSONRPCTestClient:
+ """Test client for making JSON-RPC requests"""
+
+ def __init__(self, base_url: str):
+ self.base_url = base_url
+
+ async def call_method(
+ self, method: str, params: dict[str, Any], request_id: str | None = "test-1"
+ ) -> dict[str, Any]:
+ """Make a JSON-RPC method call"""
+ request = JSONRPCRequest(method=method, params=params, id=request_id)
+
+ async with httpx.AsyncClient() as client:
+ response = await client.post(
+ f"{self.base_url}/api",
+ json=request.model_dump(),
+ headers={"Content-Type": "application/json"},
+ )
+ return response.json()
+
+ async def send_notification(
+ self, method: str, params: dict[str, Any]
+ ) -> dict[str, Any]:
+ """Send a JSON-RPC notification (no ID)"""
+ return await self.call_method(method, params, request_id=None)
+
+ async def health_check(self) -> dict[str, Any]:
+ """Check server health"""
+ async with httpx.AsyncClient() as client:
+ response = await client.get(f"{self.base_url}/healthz")
+ return response.json()
+
+
+@pytest.fixture
+def jsonrpc_client_factory():
+ """Fixture that provides a JSONRPCTestClient factory"""
+
+ def create_client(base_url: str) -> JSONRPCTestClient:
+ return JSONRPCTestClient(base_url)
+
+ return create_client
+
+
+# Mock environment variables for testing
+@pytest.fixture
+def mock_env_vars():
+ """Fixture that mocks environment variables"""
+ env_vars = {
+ "AGENTEX_BASE_URL": "", # Disable agent registration by default
+ "AGENT_NAME": "test-agent",
+ "AGENT_DESCRIPTION": "Test agent description",
+ "ACP_URL": "http://localhost",
+ "ACP_PORT": "8000",
+ "WORKFLOW_NAME": "test-workflow",
+ "WORKFLOW_TASK_QUEUE": "test-queue",
+ }
+
+ with patch.dict("os.environ", env_vars):
+ yield env_vars
diff --git a/src/agentex/lib/sdk/fastacp/tests/pytest.ini b/src/agentex/lib/sdk/fastacp/tests/pytest.ini
new file mode 100644
index 000000000..c36f46f20
--- /dev/null
+++ b/src/agentex/lib/sdk/fastacp/tests/pytest.ini
@@ -0,0 +1,10 @@
+[tool:pytest]
+asyncio_mode = auto
+addopts = -v --tb=short
+testpaths = .
+python_files = test_*.py
+python_classes = Test*
+python_functions = test_*
+filterwarnings =
+ ignore::DeprecationWarning
+ ignore::PytestDeprecationWarning
\ No newline at end of file
diff --git a/src/agentex/lib/sdk/fastacp/tests/run_tests.py b/src/agentex/lib/sdk/fastacp/tests/run_tests.py
new file mode 100644
index 000000000..eb08d9c12
--- /dev/null
+++ b/src/agentex/lib/sdk/fastacp/tests/run_tests.py
@@ -0,0 +1,227 @@
+#!/usr/bin/env python3
+"""
+Test runner for BaseACPServer and implementations.
+
+This script provides various options for running the test suite:
+- Run all tests
+- Run specific test categories
+- Run with different verbosity levels
+- Generate coverage reports
+- Run performance tests
+"""
+
+import argparse
+import subprocess
+import sys
+from pathlib import Path
+
+
+def run_command(cmd, description=""):
+ """Run a command and return the result"""
+ if description:
+ print(f"\n{'='*60}")
+ print(f"Running: {description}")
+ print(f"Command: {' '.join(cmd)}")
+ print(f"{'='*60}")
+
+ result = subprocess.run(cmd, capture_output=True, text=True, check=False)
+
+ if result.stdout:
+ print(result.stdout)
+ if result.stderr:
+ print(result.stderr, file=sys.stderr)
+
+ return result.returncode == 0
+
+
+def main():
+ parser = argparse.ArgumentParser(description="Run BaseACPServer tests")
+ parser.add_argument(
+ "--category",
+ choices=["unit", "integration", "implementations", "error", "all"],
+ default="all",
+ help="Test category to run",
+ )
+ parser.add_argument(
+ "--verbose",
+ "-v",
+ action="count",
+ default=0,
+ help="Increase verbosity (use -v, -vv, or -vvv)",
+ )
+ parser.add_argument("--coverage", action="store_true", help="Run with coverage reporting")
+ parser.add_argument(
+ "--parallel", "-n", type=int, help="Run tests in parallel (number of workers)"
+ )
+ parser.add_argument(
+ "--markers", "-m", help="Run tests with specific markers (e.g., 'not slow')"
+ )
+ parser.add_argument("--failfast", "-x", action="store_true", help="Stop on first failure")
+ parser.add_argument(
+ "--lf",
+ "--last-failed",
+ action="store_true",
+ help="Run only tests that failed in the last run",
+ )
+ parser.add_argument(
+ "--collect-only", action="store_true", help="Only collect tests, don't run them"
+ )
+
+ args = parser.parse_args()
+
+ # Base pytest command
+ cmd = ["python", "-m", "pytest"]
+
+ # Add test files based on category
+ test_files = {
+ "unit": ["test_base_acp_server.py", "test_json_rpc_endpoints.py"],
+ "integration": ["test_server_integration.py"],
+ "implementations": ["test_implementations.py"],
+ "error": ["test_error_handling.py"],
+ "all": [
+ "test_base_acp_server.py",
+ "test_json_rpc_endpoints.py",
+ "test_server_integration.py",
+ "test_implementations.py",
+ "test_error_handling.py",
+ ],
+ }
+
+ # Add test files to command
+ for test_file in test_files[args.category]:
+ cmd.append(test_file)
+
+ # Add verbosity
+ if args.verbose:
+ cmd.append("-" + "v" * min(args.verbose, 3))
+
+ # Add coverage
+ if args.coverage:
+ cmd.extend(
+ [
+                "--cov=agentex.lib.sdk.fastacp",
+ "--cov-report=html",
+ "--cov-report=term-missing",
+ "--cov-branch",
+ ]
+ )
+
+ # Add parallel execution
+ if args.parallel:
+ cmd.extend(["-n", str(args.parallel)])
+
+ # Add markers
+ if args.markers:
+ cmd.extend(["-m", args.markers])
+
+ # Add fail fast
+ if args.failfast:
+ cmd.append("-x")
+
+ # Add last failed
+ if args.lf:
+ cmd.append("--lf")
+
+ # Add collect only
+ if args.collect_only:
+ cmd.append("--collect-only")
+
+ # Add other useful options
+ cmd.extend(
+ [
+ "--tb=short", # Shorter traceback format
+ "--strict-markers", # Strict marker checking
+ "--disable-warnings", # Disable warnings for cleaner output
+ ]
+ )
+
+ # Change to test directory
+ test_dir = Path(__file__).parent
+ original_cwd = Path.cwd()
+
+ try:
+ import os
+
+ os.chdir(test_dir)
+
+ # Run the tests
+ success = run_command(cmd, f"Running {args.category} tests")
+
+ if success:
+ print(f"\n✅ All {args.category} tests passed!")
+ if args.coverage:
+ print("📊 Coverage report generated in htmlcov/")
+ else:
+ print(f"\n❌ Some {args.category} tests failed!")
+ return 1
+
+ finally:
+ os.chdir(original_cwd)
+
+ return 0
+
+
+def run_quick_tests():
+ """Run a quick subset of tests for development"""
+ cmd = [
+ "python",
+ "-m",
+ "pytest",
+ "test_base_acp_server.py::TestBaseACPServerInitialization",
+ "test_json_rpc_endpoints.py::TestJSONRPCMethodHandling",
+ "-v",
+ "--tb=short",
+ ]
+
+ return run_command(cmd, "Running quick development tests")
+
+
+def run_smoke_tests():
+ """Run smoke tests to verify basic functionality"""
+ cmd = [
+ "python",
+ "-m",
+ "pytest",
+ "-m",
+ "not slow",
+ "-x", # Stop on first failure
+ "--tb=line",
+ "test_base_acp_server.py::TestBaseACPServerInitialization::test_base_acp_server_init",
+ "test_base_acp_server.py::TestHealthCheckEndpoint::test_health_check_endpoint",
+ "test_json_rpc_endpoints.py::TestJSONRPCMethodHandling::test_message_received_method_routing",
+ ]
+
+ return run_command(cmd, "Running smoke tests")
+
+
+def run_performance_tests():
+ """Run performance-focused tests"""
+ cmd = [
+ "python",
+ "-m",
+ "pytest",
+ "test_server_integration.py::TestServerPerformance",
+ "test_error_handling.py::TestServerErrorHandling::test_server_handles_concurrent_errors",
+ "-v",
+ "--tb=short",
+ ]
+
+ return run_command(cmd, "Running performance tests")
+
+
+if __name__ == "__main__":
+ # Check if specific test type is requested via environment
+ test_type = (
+ sys.argv[1] if len(sys.argv) > 1 and sys.argv[1] in ["quick", "smoke", "perf"] else None
+ )
+
+ if test_type == "quick":
+ success = run_quick_tests()
+ elif test_type == "smoke":
+ success = run_smoke_tests()
+ elif test_type == "perf":
+ success = run_performance_tests()
+ else:
+        success = main() == 0
+
+ sys.exit(0 if success else 1)
diff --git a/src/agentex/lib/sdk/fastacp/tests/test_base_acp_server.py b/src/agentex/lib/sdk/fastacp/tests/test_base_acp_server.py
new file mode 100644
index 000000000..b0f26a811
--- /dev/null
+++ b/src/agentex/lib/sdk/fastacp/tests/test_base_acp_server.py
@@ -0,0 +1,450 @@
+import asyncio
+from unittest.mock import patch
+
+import pytest
+from fastapi.testclient import TestClient
+
+from agentex.lib.sdk.fastacp.base.base_acp_server import BaseACPServer
+from agentex.lib.types.acp import (
+ CancelTaskParams,
+ RPCMethod,
+ SendEventParams,
+)
+
+
+class TestBaseACPServerInitialization:
+ """Test BaseACPServer initialization and setup"""
+
+ def test_base_acp_server_init(self):
+ """Test BaseACPServer initialization sets up routes correctly"""
+ with patch.dict("os.environ", {"AGENTEX_BASE_URL": ""}):
+ server = BaseACPServer()
+
+ # Check that FastAPI routes are set up
+ routes = [route.path for route in server.routes]
+ assert "/healthz" in routes
+ assert "/api" in routes
+
+ # Check that handlers dict is initialized
+ assert hasattr(server, "_handlers")
+ assert isinstance(server._handlers, dict)
+
+ def test_base_acp_server_create_classmethod(self):
+ """Test BaseACPServer.create() class method"""
+ with patch.dict("os.environ", {"AGENTEX_BASE_URL": ""}):
+ server = BaseACPServer.create()
+
+ assert isinstance(server, BaseACPServer)
+ assert hasattr(server, "_handlers")
+
+ def test_lifespan_function_setup(self):
+ """Test that lifespan function is properly configured"""
+ with patch.dict("os.environ", {"AGENTEX_BASE_URL": ""}):
+ server = BaseACPServer()
+
+ # Check that lifespan is configured
+ assert server.router.lifespan_context is not None
+
+
+class TestHealthCheckEndpoint:
+ """Test health check endpoint functionality"""
+
+ def test_health_check_endpoint(self, base_acp_server):
+ """Test GET /healthz endpoint returns correct response"""
+ client = TestClient(base_acp_server)
+
+ response = client.get("/healthz")
+
+ assert response.status_code == 200
+ assert response.json() == {"status": "healthy"}
+
+ def test_health_check_content_type(self, base_acp_server):
+ """Test health check returns JSON content type"""
+ client = TestClient(base_acp_server)
+
+ response = client.get("/healthz")
+
+ assert response.headers["content-type"] == "application/json"
+
+
+class TestJSONRPCEndpointCore:
+ """Test core JSON-RPC endpoint functionality"""
+
+ def test_jsonrpc_endpoint_exists(self, base_acp_server):
+ """Test POST /api endpoint exists"""
+ client = TestClient(base_acp_server)
+
+ # Send a basic request to check endpoint exists
+ response = client.post("/api", json={})
+
+ # Should not return 404 (endpoint exists)
+ assert response.status_code != 404
+
+ def test_jsonrpc_malformed_request(self, base_acp_server):
+ """Test JSON-RPC endpoint handles malformed requests"""
+ client = TestClient(base_acp_server)
+
+ # Send malformed JSON
+ response = client.post("/api", json={"invalid": "request"})
+
+ assert response.status_code == 200
+ data = response.json()
+ assert "error" in data
+ assert data["jsonrpc"] == "2.0"
+
+ def test_jsonrpc_method_not_found(self, base_acp_server):
+ """Test JSON-RPC method not found error"""
+ client = TestClient(base_acp_server)
+
+ request = {
+ "jsonrpc": "2.0",
+ "method": "nonexistent/method",
+ "params": {},
+ "id": "test-1",
+ }
+
+ response = client.post("/api", json=request)
+
+ assert response.status_code == 200
+ data = response.json()
+ assert "error" in data
+ assert data["error"]["code"] == -32601 # Method not found
+ assert data["id"] == "test-1"
+
+ def test_jsonrpc_valid_request_structure(self, base_acp_server):
+ """Test JSON-RPC request parsing with valid structure"""
+ client = TestClient(base_acp_server)
+
+ # Add a mock handler for testing
+ async def mock_handler(params):
+ return {"status": "success"}
+
+ base_acp_server._handlers[RPCMethod.EVENT_SEND] = mock_handler
+
+ request = {
+ "jsonrpc": "2.0",
+ "method": "event/send",
+ "params": {
+ "task": {"id": "test-task", "agent_id": "test-agent", "status": "RUNNING"},
+ "message": {
+ "type": "text",
+ "author": "user",
+ "content": "test message",
+ },
+ },
+ "id": "test-1",
+ }
+
+ response = client.post("/api", json=request)
+
+ assert response.status_code == 200
+ data = response.json()
+ assert data["jsonrpc"] == "2.0"
+ assert data["id"] == "test-1"
+ print("DATA", data)
+ # Should return immediate acknowledgment
+ assert data["result"]["status"] == "processing"
+
+
+class TestHandlerRegistration:
+ """Test handler registration and management"""
+
+ def test_on_task_event_send_decorator(self):
+ """Test on_task_event_send decorator registration"""
+ with patch.dict("os.environ", {"AGENTEX_BASE_URL": ""}):
+ server = BaseACPServer()
+
+ @server.on_task_event_send
+ async def test_handler(params: SendEventParams):
+ return {"test": "response"}
+
+ # Check handler is registered
+ assert RPCMethod.EVENT_SEND in server._handlers
+ assert server._handlers[RPCMethod.EVENT_SEND] is not None
+
+ def test_cancel_task_decorator(self):
+ """Test cancel_task decorator registration"""
+ with patch.dict("os.environ", {"AGENTEX_BASE_URL": ""}):
+ server = BaseACPServer()
+
+ @server.on_task_cancel
+ async def test_handler(params: CancelTaskParams):
+ return {"test": "response"}
+
+ # Check handler is registered
+ assert RPCMethod.TASK_CANCEL in server._handlers
+ assert server._handlers[RPCMethod.TASK_CANCEL] is not None
+
+ @pytest.mark.asyncio
+ async def test_handler_wrapper_functionality(self):
+ """Test that handler wrapper works correctly"""
+ with patch.dict("os.environ", {"AGENTEX_BASE_URL": ""}):
+ server = BaseACPServer()
+
+ # Create a test handler
+ async def test_handler(params):
+ return {"handler_called": True, "params_received": True}
+
+ # Wrap the handler
+ wrapped = server._wrap_handler(test_handler)
+
+ # Test the wrapped handler
+ result = await wrapped({"test": "params"})
+ assert result["handler_called"] is True
+ assert result["params_received"] is True
+
+
+class TestBackgroundProcessing:
+ """Test background processing functionality"""
+
+ @pytest.mark.asyncio
+ async def test_notification_processing(self, async_base_acp_server):
+ """Test notification processing (requests with no ID)"""
+ # Add a mock handler
+ handler_called = False
+ received_params = None
+
+ async def mock_handler(params):
+ nonlocal handler_called, received_params
+ handler_called = True
+ received_params = params
+ return {"status": "processed"}
+
+ async_base_acp_server._handlers[RPCMethod.EVENT_SEND] = mock_handler
+
+ client = TestClient(async_base_acp_server)
+
+ request = {
+ "jsonrpc": "2.0",
+ "method": "event/send",
+ "params": {
+ "task": {"id": "test-task", "agent_id": "test-agent", "status": "RUNNING"},
+ "message": {
+ "type": "text",
+ "author": "user",
+ "content": "test message",
+ },
+ },
+ # No ID = notification
+ }
+
+ response = client.post("/api", json=request)
+
+ assert response.status_code == 200
+ data = response.json()
+ assert data["id"] is None # Notification response
+
+ # Give background task time to execute
+ await asyncio.sleep(0.1)
+
+ # Handler should have been called
+ assert handler_called is True
+ assert received_params is not None
+
+ @pytest.mark.asyncio
+ async def test_request_processing_with_id(self, async_base_acp_server):
+ """Test request processing with ID returns immediate acknowledgment"""
+
+ # Add a mock handler
+ async def mock_handler(params):
+ return {"status": "processed"}
+
+ async_base_acp_server._handlers[RPCMethod.TASK_CANCEL] = mock_handler
+
+ client = TestClient(async_base_acp_server)
+
+ request = {
+ "jsonrpc": "2.0",
+ "method": "task/cancel",
+ "params": {"task_id": "test-task-123"},
+ "id": "test-request-1",
+ }
+
+ response = client.post("/api", json=request)
+
+ assert response.status_code == 200
+ data = response.json()
+ assert data["jsonrpc"] == "2.0"
+ assert data["id"] == "test-request-1"
+ assert data["result"]["status"] == "processing" # Immediate acknowledgment
+
+
+class TestSynchronousRPCMethods:
+ """Test synchronous RPC methods that return results immediately"""
+
+ def test_send_message_synchronous_response(self, base_acp_server):
+ """Test that MESSAGE_SEND method returns handler result synchronously"""
+ client = TestClient(base_acp_server)
+
+ # Add a mock handler that returns a specific result
+ async def mock_execute_handler(params):
+ return {
+ "task_id": params.task.id,
+ "message_content": params.message.content,
+ "status": "executed_synchronously",
+ "custom_data": {"processed": True, "timestamp": "2024-01-01T12:00:00Z"},
+ }
+
+ base_acp_server._handlers[RPCMethod.MESSAGE_SEND] = mock_execute_handler
+
+ request = {
+ "jsonrpc": "2.0",
+ "method": "message/send",
+ "params": {
+ "task": {"id": "test-task-123", "agent_id": "test-agent", "status": "RUNNING"},
+ "message": {
+ "type": "text",
+ "author": "user",
+ "content": "Execute this task please",
+ },
+ },
+ "id": "test-execute-1",
+ }
+
+ response = client.post("/api", json=request)
+
+ assert response.status_code == 200
+ data = response.json()
+
+ # Verify JSON-RPC structure
+ assert data["jsonrpc"] == "2.0"
+ assert data["id"] == "test-execute-1"
+ assert "result" in data
+ assert data.get("error") is None
+
+ # Verify the handler's result is returned directly (not "processing" status)
+ result = data["result"]
+ assert result["task_id"] == "test-task-123"
+ assert result["message_content"] == "Execute this task please"
+ assert result["status"] == "executed_synchronously"
+ assert result["custom_data"]["processed"] is True
+ assert result["custom_data"]["timestamp"] == "2024-01-01T12:00:00Z"
+
+ # Verify it's NOT the async "processing" response
+ assert result.get("status") != "processing"
+
+ def test_create_task_async_response(self, base_acp_server):
+ """Test that TASK_CREATE method returns processing status (async behavior)"""
+ client = TestClient(base_acp_server)
+
+ # Add a mock handler for init task
+ async def mock_init_handler(params):
+ return {
+ "task_id": params.task.id,
+ "status": "initialized",
+ }
+
+ base_acp_server._handlers[RPCMethod.TASK_CREATE] = mock_init_handler
+
+ request = {
+ "jsonrpc": "2.0",
+ "method": "task/create",
+ "params": {
+ "task": {"id": "test-task-456", "agent_id": "test-agent", "status": "RUNNING"}
+ },
+ "id": "test-init-1",
+ }
+
+ response = client.post("/api", json=request)
+
+ assert response.status_code == 200
+ data = response.json()
+
+ # Verify JSON-RPC structure
+ assert data["jsonrpc"] == "2.0"
+ assert data["id"] == "test-init-1"
+ assert "result" in data
+ assert data.get("error") is None
+
+ # Verify it returns async "processing" status (not the handler's result)
+ result = data["result"]
+ assert result["status"] == "processing"
+
+ # Verify it's NOT the handler's actual result
+ assert result.get("status") != "initialized"
+
+
+class TestErrorHandling:
+ """Test error handling scenarios"""
+
+ def test_invalid_json_request(self, base_acp_server):
+ """Test handling of invalid JSON in request body"""
+ client = TestClient(base_acp_server)
+
+ # Send invalid JSON
+ response = client.post(
+ "/api", content="invalid json", headers={"Content-Type": "application/json"}
+ )
+
+ assert response.status_code == 200
+ data = response.json()
+ assert "error" in data
+ assert data["jsonrpc"] == "2.0"
+
+ def test_missing_required_fields(self, base_acp_server):
+ """Test handling of requests missing required JSON-RPC fields"""
+ client = TestClient(base_acp_server)
+
+ # Missing method field
+ request = {"jsonrpc": "2.0", "params": {}, "id": "test-1"}
+
+ response = client.post("/api", json=request)
+
+ assert response.status_code == 200
+ data = response.json()
+ assert "error" in data
+
+ def test_invalid_method_enum(self, base_acp_server):
+ """Test handling of invalid method names"""
+ client = TestClient(base_acp_server)
+
+ request = {
+ "jsonrpc": "2.0",
+ "method": "invalid/method/name",
+ "params": {},
+ "id": "test-1",
+ }
+
+ response = client.post("/api", json=request)
+
+ assert response.status_code == 200
+ data = response.json()
+ assert "error" in data
+ assert data["error"]["code"] == -32601 # Method not found
+
+ @pytest.mark.asyncio
+ async def test_handler_exception_handling(self, async_base_acp_server):
+ """Test that handler exceptions are properly handled"""
+
+ # Add a handler that raises an exception
+ async def failing_handler(params):
+ raise ValueError("Test exception")
+
+ async_base_acp_server._handlers[RPCMethod.EVENT_SEND] = failing_handler
+
+ client = TestClient(async_base_acp_server)
+
+ request = {
+ "jsonrpc": "2.0",
+ "method": "event/send",
+ "params": {
+ "task": {"id": "test-task", "agent_id": "test-agent", "status": "RUNNING"},
+ "message": {
+ "type": "text",
+ "author": "user",
+ "content": "test message",
+ },
+ },
+ "id": "test-1",
+ }
+
+ response = client.post("/api", json=request)
+
+ # Should still return immediate acknowledgment
+ assert response.status_code == 200
+ data = response.json()
+ assert data["result"]["status"] == "processing"
+
+ # Give background task time to fail
+ await asyncio.sleep(0.1)
+ # Exception should be logged but not crash the server
diff --git a/src/agentex/lib/sdk/fastacp/tests/test_fastacp_factory.py b/src/agentex/lib/sdk/fastacp/tests/test_fastacp_factory.py
new file mode 100644
index 000000000..62bdc1256
--- /dev/null
+++ b/src/agentex/lib/sdk/fastacp/tests/test_fastacp_factory.py
@@ -0,0 +1,344 @@
+import asyncio
+from unittest.mock import AsyncMock, MagicMock, patch
+
+import pytest
+
+from agentex.lib.sdk.fastacp.base.base_acp_server import BaseACPServer
+from agentex.lib.sdk.fastacp.fastacp import FastACP
+from agentex.lib.sdk.fastacp.impl.agentic_base_acp import AgenticBaseACP
+from agentex.lib.sdk.fastacp.impl.sync_acp import SyncACP
+from agentex.lib.sdk.fastacp.impl.temporal_acp import TemporalACP
+from agentex.lib.types.fastacp import (
+ AgenticACPConfig,
+ AgenticBaseACPConfig,
+ SyncACPConfig,
+ TemporalACPConfig,
+)
+
+
+class TestFastACPInitialization:
+ """Test FastACP basic functionality"""
+
+ def test_factory_class_exists(self):
+ """Test that FastACP class exists and is properly structured"""
+ assert hasattr(FastACP, "create")
+ assert hasattr(FastACP, "create_sync_acp")
+ assert hasattr(FastACP, "create_agentic_acp")
+
+
+class TestSyncACPCreation:
+ """Test SyncACP creation through factory"""
+
+ @pytest.mark.asyncio
+ async def test_create_sync_acp_direct_method(self):
+ """Test creating SyncACP using direct method"""
+ with patch.dict("os.environ", {"AGENTEX_BASE_URL": ""}):
+ sync_acp = FastACP.create_sync_acp()
+
+ assert isinstance(sync_acp, SyncACP)
+ assert isinstance(sync_acp, BaseACPServer)
+ assert hasattr(sync_acp, "_handlers")
+
+ @pytest.mark.asyncio
+ async def test_create_sync_acp_with_config(self):
+ """Test creating SyncACP with configuration"""
+ with patch.dict("os.environ", {"AGENTEX_BASE_URL": ""}):
+ config = SyncACPConfig()
+ sync_acp = FastACP.create_sync_acp(config=config)
+
+ assert isinstance(sync_acp, SyncACP)
+
+ @pytest.mark.asyncio
+ async def test_create_sync_acp_via_generic_create(self):
+ """Test creating SyncACP via generic create method"""
+ with patch.dict("os.environ", {"AGENTEX_BASE_URL": ""}):
+ sync_acp = FastACP.create("sync")
+
+ assert isinstance(sync_acp, SyncACP)
+
+ @pytest.mark.asyncio
+ async def test_create_sync_acp_via_generic_create_with_config(self):
+ """Test creating SyncACP via generic create method with config"""
+ with patch.dict("os.environ", {"AGENTEX_BASE_URL": ""}):
+ config = SyncACPConfig()
+ sync_acp = FastACP.create("sync", config=config)
+
+ assert isinstance(sync_acp, SyncACP)
+
+ @pytest.mark.asyncio
+ async def test_create_sync_acp_with_enum(self):
+ """Test creating SyncACP using ACPType enum"""
+ with patch.dict("os.environ", {"AGENTEX_BASE_URL": ""}):
+ sync_acp = FastACP.create("sync")
+
+ assert isinstance(sync_acp, SyncACP)
+
+ @pytest.mark.asyncio
+ async def test_create_sync_acp_with_kwargs(self):
+ """Test creating SyncACP with additional kwargs"""
+ with patch.dict("os.environ", {"AGENTEX_BASE_URL": ""}):
+ sync_acp = FastACP.create_sync_acp(custom_param="test_value")
+
+ assert isinstance(sync_acp, SyncACP)
+
+
+class TestAgenticBaseACPCreation:
+ """Test AgenticBaseACP creation through factory"""
+
+ @pytest.mark.asyncio
+ async def test_create_agentic_base_acp_direct_method(self):
+ """Test creating AgenticBaseACP using direct method"""
+ with patch.dict("os.environ", {"AGENTEX_BASE_URL": ""}):
+ config = AgenticACPConfig(type="base")
+ agentic_acp = FastACP.create_agentic_acp(config=config)
+
+ assert isinstance(agentic_acp, AgenticBaseACP)
+ assert isinstance(agentic_acp, BaseACPServer)
+
+ @pytest.mark.asyncio
+ async def test_create_agentic_base_acp_with_specific_config(self):
+ """Test creating AgenticBaseACP with AgenticBaseACPConfig"""
+ with patch.dict("os.environ", {"AGENTEX_BASE_URL": ""}):
+ config = AgenticBaseACPConfig(type="base")
+ agentic_acp = FastACP.create_agentic_acp(config=config)
+
+ assert isinstance(agentic_acp, AgenticBaseACP)
+
+ @pytest.mark.asyncio
+ async def test_create_agentic_base_acp_via_generic_create(self):
+ """Test creating AgenticBaseACP via generic create method"""
+ with patch.dict("os.environ", {"AGENTEX_BASE_URL": ""}):
+ config = AgenticACPConfig(type="base")
+ agentic_acp = FastACP.create("agentic", config=config)
+
+ assert isinstance(agentic_acp, AgenticBaseACP)
+
+ @pytest.mark.asyncio
+ async def test_create_agentic_base_acp_with_enum(self):
+ """Test creating AgenticBaseACP using ACPType enum"""
+ with patch.dict("os.environ", {"AGENTEX_BASE_URL": ""}):
+ config = AgenticACPConfig(type="base")
+ agentic_acp = FastACP.create("agentic", config=config)
+
+ assert isinstance(agentic_acp, AgenticBaseACP)
+
+
+class TestAgenticTemporalACPCreation:
+ """Test AgenticTemporalACP (TemporalACP) creation through factory"""
+
+ @pytest.mark.asyncio
+ async def test_create_temporal_acp_direct_method(self):
+ """Test creating TemporalACP using direct method"""
+ with patch.dict("os.environ", {"AGENTEX_BASE_URL": ""}):
+ config = AgenticACPConfig(type="temporal")
+
+ # Mock the TemporalACP.create method since it requires temporal dependencies
+ with patch.object(TemporalACP, "create", new_callable=AsyncMock) as mock_create:
+ mock_temporal_instance = MagicMock(spec=TemporalACP)
+ mock_create.return_value = mock_temporal_instance
+
+ temporal_acp = FastACP.create_agentic_acp(config=config)
+
+ assert temporal_acp == mock_temporal_instance
+ mock_create.assert_called_once()
+
+ @pytest.mark.asyncio
+ async def test_create_temporal_acp_with_temporal_config(self):
+ """Test creating TemporalACP with TemporalACPConfig"""
+ with patch.dict("os.environ", {"AGENTEX_BASE_URL": ""}):
+ config = TemporalACPConfig(type="temporal", temporal_address="localhost:7233")
+
+ with patch.object(TemporalACP, "create", new_callable=AsyncMock) as mock_create:
+ mock_temporal_instance = MagicMock(spec=TemporalACP)
+ mock_create.return_value = mock_temporal_instance
+
+ temporal_acp = FastACP.create_agentic_acp(config=config)
+
+ assert temporal_acp == mock_temporal_instance
+ # Verify temporal_address was passed
+ mock_create.assert_called_once_with(temporal_address="localhost:7233")
+
+ @pytest.mark.asyncio
+ async def test_create_temporal_acp_via_generic_create(self):
+ """Test creating TemporalACP via generic create method"""
+ with patch.dict("os.environ", {"AGENTEX_BASE_URL": ""}):
+ config = AgenticACPConfig(type="temporal")
+
+ with patch.object(TemporalACP, "create", new_callable=AsyncMock) as mock_create:
+ mock_temporal_instance = MagicMock(spec=TemporalACP)
+ mock_create.return_value = mock_temporal_instance
+
+ temporal_acp = FastACP.create("agentic", config=config)
+
+ assert temporal_acp == mock_temporal_instance
+
+ @pytest.mark.asyncio
+ async def test_create_temporal_acp_with_custom_address(self):
+ """Test creating TemporalACP with custom temporal address"""
+ with patch.dict("os.environ", {"AGENTEX_BASE_URL": ""}):
+ config = TemporalACPConfig(type="temporal", temporal_address="custom-temporal:9999")
+
+ with patch.object(TemporalACP, "create", new_callable=AsyncMock) as mock_create:
+ mock_temporal_instance = MagicMock(spec=TemporalACP)
+ mock_create.return_value = mock_temporal_instance
+
+ FastACP.create_agentic_acp(config=config)
+
+ mock_create.assert_called_once_with(temporal_address="custom-temporal:9999")
+
+
+class TestConfigurationValidation:
+ """Test configuration validation and error handling"""
+
+ @pytest.mark.asyncio
+ async def test_agentic_requires_config(self):
+ """Test that agentic ACP creation requires configuration"""
+ with patch.dict("os.environ", {"AGENTEX_BASE_URL": ""}):
+ with pytest.raises(ValueError, match="AgenticACPConfig is required"):
+ FastACP.create("agentic")
+
+ @pytest.mark.asyncio
+ async def test_agentic_requires_correct_config_type(self):
+ """Test that agentic ACP creation requires AgenticACPConfig type"""
+ with patch.dict("os.environ", {"AGENTEX_BASE_URL": ""}):
+ sync_config = SyncACPConfig()
+
+ with pytest.raises(ValueError, match="AgenticACPConfig is required"):
+ FastACP.create("agentic", config=sync_config)
+
+ @pytest.mark.asyncio
+ async def test_agentic_direct_method_requires_config(self):
+ """Test that direct agentic method requires configuration"""
+ with patch.dict("os.environ", {"AGENTEX_BASE_URL": ""}):
+ # This should raise TypeError since config is required parameter
+ with pytest.raises(TypeError):
+ FastACP.create_agentic_acp()
+
+ def test_invalid_acp_type_string(self):
+ """Test that invalid ACP type string raises ValueError"""
+ with patch.dict("os.environ", {"AGENTEX_BASE_URL": ""}):
+ with pytest.raises(ValueError):
+                FastACP.create("invalid_type")
+
+ def test_invalid_agentic_type_in_config(self):
+ """Test that invalid agentic type in config raises ValueError"""
+ with patch.dict("os.environ", {"AGENTEX_BASE_URL": ""}):
+ # This should raise ValueError during config creation
+ with pytest.raises(ValueError):
+ AgenticACPConfig(type="invalid_agentic_type")
+
+ @pytest.mark.asyncio
+ async def test_unsupported_acp_type_enum(self):
+ """Test handling of unsupported ACP type enum values"""
+ with patch.dict("os.environ", {"AGENTEX_BASE_URL": ""}):
+ # Create a mock enum value that's not supported
+            with patch("agentex.lib.sdk.fastacp.fastacp.ACPType") as mock_enum:
+ mock_enum.SYNC = "sync"
+ mock_enum.AGENTIC = "agentic"
+ unsupported_type = "unsupported"
+
+ with pytest.raises(ValueError, match="Unsupported ACP type"):
+ FastACP.create(unsupported_type)
+
+
+class TestErrorHandling:
+ """Test error handling scenarios"""
+
+ @pytest.mark.asyncio
+ async def test_sync_acp_creation_failure(self):
+ """Test handling of SyncACP creation failure"""
+ with patch.dict("os.environ", {"AGENTEX_BASE_URL": ""}):
+ with patch.object(SyncACP, "create", side_effect=Exception("Creation failed")):
+ with pytest.raises(Exception, match="Creation failed"):
+ FastACP.create_sync_acp()
+
+ @pytest.mark.asyncio
+ async def test_agentic_acp_creation_failure(self):
+ """Test handling of AgenticACP creation failure"""
+ with patch.dict("os.environ", {"AGENTEX_BASE_URL": ""}):
+ config = AgenticACPConfig(type="base")
+
+ with patch.object(AgenticBaseACP, "create", side_effect=Exception("Creation failed")):
+ with pytest.raises(Exception, match="Creation failed"):
+ FastACP.create_agentic_acp(config=config)
+
+ @pytest.mark.asyncio
+ async def test_temporal_acp_creation_failure(self):
+ """Test handling of TemporalACP creation failure"""
+ with patch.dict("os.environ", {"AGENTEX_BASE_URL": ""}):
+ config = AgenticACPConfig(type="temporal")
+
+ with patch.object(
+ TemporalACP, "create", side_effect=Exception("Temporal connection failed")
+ ):
+ with pytest.raises(Exception, match="Temporal connection failed"):
+ FastACP.create_agentic_acp(config=config)
+
+
+class TestIntegrationScenarios:
+ """Test integration scenarios and real-world usage patterns"""
+
+ @pytest.mark.asyncio
+ async def test_create_all_acp_types(self):
+ """Test creating all supported ACP types"""
+ with patch.dict("os.environ", {"AGENTEX_BASE_URL": ""}):
+ # Create SyncACP
+ sync_acp = FastACP.create("sync")
+ assert isinstance(sync_acp, SyncACP)
+
+ # Create AgenticBaseACP
+ base_config = AgenticACPConfig(type="base")
+ agentic_base = FastACP.create("agentic", config=base_config)
+ assert isinstance(agentic_base, AgenticBaseACP)
+
+ # Create TemporalACP (mocked)
+ temporal_config = AgenticACPConfig(type="temporal")
+ with patch.object(TemporalACP, "create", new_callable=AsyncMock) as mock_create:
+ mock_temporal_instance = MagicMock(spec=TemporalACP)
+ mock_create.return_value = mock_temporal_instance
+
+ temporal_acp = FastACP.create("agentic", config=temporal_config)
+ assert temporal_acp == mock_temporal_instance
+
+ @pytest.mark.asyncio
+ async def test_configuration_driven_creation(self):
+ """Test configuration-driven ACP creation"""
+ with patch.dict("os.environ", {"AGENTEX_BASE_URL": ""}):
+ configs = [
+ ("sync", None),
+ ("agentic", AgenticACPConfig(type="base")),
+ ("agentic", TemporalACPConfig(type="temporal", temporal_address="localhost:7233")),
+ ]
+
+ created_acps = []
+
+ for acp_type, config in configs:
+ if acp_type == "agentic" and config.type == "temporal":
+ # Mock temporal creation
+ with patch.object(TemporalACP, "create", new_callable=AsyncMock) as mock_create:
+ mock_temporal_instance = MagicMock(spec=TemporalACP)
+ mock_create.return_value = mock_temporal_instance
+
+ acp = FastACP.create(acp_type, config=config)
+ created_acps.append(acp)
+ else:
+ acp = FastACP.create(acp_type, config=config)
+ created_acps.append(acp)
+
+ assert len(created_acps) == 3
+ assert isinstance(created_acps[0], SyncACP)
+ assert isinstance(created_acps[1], AgenticBaseACP)
+ # Third one is mocked TemporalACP
+
+ @pytest.mark.asyncio
+ async def test_factory_with_custom_kwargs(self):
+ """Test factory methods with custom keyword arguments"""
+ with patch.dict("os.environ", {"AGENTEX_BASE_URL": ""}):
+ # Test sync with kwargs
+ sync_acp = FastACP.create_sync_acp(custom_param="test")
+ assert isinstance(sync_acp, SyncACP)
+
+ # Test agentic base with kwargs
+ config = AgenticACPConfig(type="base")
+ agentic_acp = FastACP.create_agentic_acp(config=config, custom_param="test")
+ assert isinstance(agentic_acp, AgenticBaseACP)
diff --git a/src/agentex/lib/sdk/fastacp/tests/test_integration.py b/src/agentex/lib/sdk/fastacp/tests/test_integration.py
new file mode 100644
index 000000000..dad390034
--- /dev/null
+++ b/src/agentex/lib/sdk/fastacp/tests/test_integration.py
@@ -0,0 +1,477 @@
+import asyncio
+from unittest.mock import AsyncMock, MagicMock, patch
+
+import httpx
+import pytest
+
+from agentex.lib.sdk.fastacp.impl.agentic_base_acp import AgenticBaseACP
+from agentex.lib.sdk.fastacp.impl.sync_acp import SyncACP
+from agentex.lib.sdk.fastacp.impl.temporal_acp import TemporalACP
+from agentex.lib.types.acp import (
+ CancelTaskParams,
+ CreateTaskParams,
+ RPCMethod,
+ SendEventParams,
+)
+
+
+class TestImplementationBehavior:
+ """Test specific behavior differences between ACP implementations"""
+
+ @pytest.mark.asyncio()
+ async def test_sync_acp_default_handlers(self):
+ """Test SyncACP has expected default handlers"""
+ with patch.dict("os.environ", {"AGENTEX_BASE_URL": ""}):
+ sync_acp = SyncACP.create()
+
+ # Should have send_message_message handler by default
+ assert RPCMethod.MESSAGE_SEND in sync_acp._handlers
+
+ @pytest.mark.asyncio()
+ async def test_agentic_acp_default_handlers(self):
+ """Test AgenticBaseACP has expected default handlers"""
+ with patch.dict("os.environ", {"AGENTEX_BASE_URL": ""}):
+ agentic_acp = AgenticBaseACP.create()
+
+ # Should have create, message, and cancel handlers by default
+ assert RPCMethod.TASK_CREATE in agentic_acp._handlers
+ assert RPCMethod.EVENT_SEND in agentic_acp._handlers
+ assert RPCMethod.TASK_CANCEL in agentic_acp._handlers
+
+ @pytest.mark.asyncio()
+ async def test_temporal_acp_creation_with_mocked_client(self):
+ """Test TemporalACP creation with mocked temporal client"""
+ with patch.dict("os.environ", {"AGENTEX_BASE_URL": ""}):
+ with patch.object(TemporalACP, "create", new_callable=AsyncMock) as mock_create:
+ mock_temporal_instance = MagicMock(spec=TemporalACP)
+ mock_temporal_instance._handlers = {}
+ mock_temporal_instance.temporal_client = MagicMock()
+ mock_create.return_value = mock_temporal_instance
+
+ temporal_acp = await TemporalACP.create()
+
+ assert temporal_acp == mock_temporal_instance
+ assert hasattr(temporal_acp, "temporal_client")
+
+
+class TestRealWorldScenarios:
+ """Test real-world usage scenarios and integration"""
+
+ @pytest.mark.asyncio()
+ async def test_message_handling_workflow(self, sync_acp, free_port, test_server_runner):
+ """Test complete message handling workflow"""
+ messages_received = []
+
+ @sync_acp.on_task_event_send
+ async def message_handler(params: SendEventParams):
+ messages_received.append(
+ {
+ "task_id": params.task.id,
+ "message_content": params.message.content,
+ "author": params.message.author,
+ }
+ )
+ return {"processed": True}
+
+ runner = test_server_runner(sync_acp, free_port)
+ await runner.start()
+
+ # Send multiple messages
+ async with httpx.AsyncClient() as client:
+ for i in range(3):
+ request_data = {
+ "jsonrpc": "2.0",
+ "method": "event/send",
+ "params": {
+ "task": {
+ "id": f"workflow-task-{i}",
+ "agent_id": "workflow-agent",
+ "status": "RUNNING",
+ },
+ "message": {
+ "type": "text",
+ "author": "user",
+ "content": f"Workflow message {i}",
+ },
+ },
+ "id": f"workflow-{i}",
+ }
+
+ response = await client.post(f"http://127.0.0.1:{free_port}/api", json=request_data)
+ assert response.status_code == 200
+
+ # Give background tasks time to process
+ await asyncio.sleep(0.2)
+
+ # Verify all messages were processed
+ assert len(messages_received) == 3
+ for i, msg in enumerate(messages_received):
+ assert msg["task_id"] == f"workflow-task-{i}"
+ assert msg["message_content"] == f"Workflow message {i}"
+ assert msg["author"] == "user"
+
+ await runner.stop()
+
+ @pytest.mark.asyncio()
+ async def test_task_lifecycle_management(self, agentic_base_acp, free_port, test_server_runner):
+ """Test complete task lifecycle: create -> message -> cancel"""
+ task_events = []
+
+ @agentic_base_acp.on_task_create
+ async def create_handler(params: CreateTaskParams):
+ task_events.append(("created", params.task.id))
+
+ @agentic_base_acp.on_task_event_send
+ async def message_handler(params: SendEventParams):
+ task_events.append(("message", params.task.id))
+
+ @agentic_base_acp.on_task_cancel
+ async def cancel_handler(params: CancelTaskParams):
+ task_events.append(("cancelled", params.task_id))
+
+ runner = test_server_runner(agentic_base_acp, free_port)
+ await runner.start()
+
+ async with httpx.AsyncClient() as client:
+ # Create task
+ create_request = {
+ "jsonrpc": "2.0",
+ "method": "task/create",
+ "params": {
+ "task": {
+ "id": "lifecycle-task",
+ "agent_id": "lifecycle-agent",
+ "status": "RUNNING",
+ }
+ },
+ "id": "create-1",
+ }
+
+ response = await client.post(f"http://127.0.0.1:{free_port}/api", json=create_request)
+ assert response.status_code == 200
+
+ # Send message
+ message_request = {
+ "jsonrpc": "2.0",
+ "method": "event/send",
+ "params": {
+ "task": {
+ "id": "lifecycle-task",
+ "agent_id": "lifecycle-agent",
+ "status": "RUNNING",
+ },
+ "message": {
+ "type": "text",
+ "author": "user",
+ "content": "Lifecycle test message",
+ },
+ },
+ "id": "message-1",
+ }
+
+ response = await client.post(f"http://127.0.0.1:{free_port}/api", json=message_request)
+ assert response.status_code == 200
+
+ # Cancel task
+ cancel_request = {
+ "jsonrpc": "2.0",
+ "method": "task/cancel",
+ "params": {"task_id": "lifecycle-task"},
+ "id": "cancel-1",
+ }
+
+ response = await client.post(f"http://127.0.0.1:{free_port}/api", json=cancel_request)
+ assert response.status_code == 200
+
+ # Give background tasks time to process
+ await asyncio.sleep(0.2)
+
+ # Verify task lifecycle events
+ assert len(task_events) == 3
+ assert task_events[0] == ("created", "lifecycle-task")
+ assert task_events[1] == ("message", "lifecycle-task")
+ assert task_events[2] == ("cancelled", "lifecycle-task")
+
+ await runner.stop()
+
+
+class TestErrorRecovery:
+ """Test error handling and recovery scenarios"""
+
+ @pytest.mark.asyncio()
+ async def test_server_resilience_to_handler_failures(
+ self, sync_acp, free_port, test_server_runner
+ ):
+ """Test server continues working after handler failures"""
+ failure_count = 0
+ success_count = 0
+
+ @sync_acp.on_task_event_send
+ async def unreliable_handler(params: SendEventParams):
+ nonlocal failure_count, success_count
+ if "fail" in params.message.content:
+ failure_count += 1
+ raise RuntimeError("Simulated handler failure")
+ else:
+ success_count += 1
+ return {"success": True}
+
+ runner = test_server_runner(sync_acp, free_port)
+ await runner.start()
+
+ async with httpx.AsyncClient() as client:
+ # Send failing request
+ fail_request = {
+ "jsonrpc": "2.0",
+ "method": "event/send",
+ "params": {
+ "task": {"id": "fail-task", "agent_id": "test-agent", "status": "RUNNING"},
+ "message": {"type": "text", "author": "user", "content": "This should fail"},
+ },
+ "id": "fail-1",
+ }
+
+ response = await client.post(f"http://127.0.0.1:{free_port}/api", json=fail_request)
+ assert response.status_code == 200 # Server should still respond
+
+ # Send successful request after failure
+ success_request = {
+ "jsonrpc": "2.0",
+ "method": "event/send",
+ "params": {
+ "task": {"id": "success-task", "agent_id": "test-agent", "status": "RUNNING"},
+ "message": {"type": "text", "author": "user", "content": "This should succeed"},
+ },
+ "id": "success-1",
+ }
+
+ response = await client.post(f"http://127.0.0.1:{free_port}/api", json=success_request)
+ assert response.status_code == 200
+
+ # Verify server is still healthy
+ health_response = await client.get(f"http://127.0.0.1:{free_port}/healthz")
+ assert health_response.status_code == 200
+
+ # Give background tasks time to process
+ await asyncio.sleep(0.2)
+
+ assert failure_count == 1
+ assert success_count == 1
+
+ await runner.stop()
+
+ @pytest.mark.asyncio()
+ async def test_concurrent_request_handling(self, sync_acp, free_port, test_server_runner):
+ """Test handling multiple concurrent requests"""
+ processed_requests = []
+
+ @sync_acp.on_task_event_send
+ async def concurrent_handler(params: SendEventParams):
+ # Simulate some processing time
+ await asyncio.sleep(0.05)
+ processed_requests.append(params.task.id)
+ return {"processed": params.task.id}
+
+ runner = test_server_runner(sync_acp, free_port)
+ await runner.start()
+
+ # Send multiple concurrent requests
+ async def send_request(client, task_id):
+ request_data = {
+ "jsonrpc": "2.0",
+ "method": "event/send",
+ "params": {
+ "task": {"id": task_id, "agent_id": "concurrent-agent", "status": "RUNNING"},
+ "message": {
+ "type": "text",
+ "author": "user",
+ "content": f"Concurrent message for {task_id}",
+ },
+ },
+ "id": f"concurrent-{task_id}",
+ }
+
+ return await client.post(f"http://127.0.0.1:{free_port}/api", json=request_data)
+
+ async with httpx.AsyncClient() as client:
+ # Send 5 concurrent requests
+ tasks = [send_request(client, f"task-{i}") for i in range(5)]
+ responses = await asyncio.gather(*tasks)
+
+ # All should return immediate acknowledgment
+ for response in responses:
+ assert response.status_code == 200
+ data = response.json()
+ assert data["result"]["status"] == "processing"
+
+ # Give background tasks time to complete
+ await asyncio.sleep(0.3)
+
+ # All requests should have been processed
+ assert len(processed_requests) == 5
+ assert set(processed_requests) == {f"task-{i}" for i in range(5)}
+
+ await runner.stop()
+
+
+class TestSpecialCases:
+ """Test edge cases and special scenarios"""
+
+ @pytest.mark.asyncio()
+ async def test_notification_vs_request_behavior(self, sync_acp, free_port, test_server_runner):
+ """Test difference between notifications (no ID) and requests (with ID)"""
+ notifications_received = 0
+ requests_received = 0
+
+ @sync_acp.on_task_event_send
+ async def tracking_handler(params: SendEventParams):
+ nonlocal notifications_received, requests_received
+ if "notification" in params.message.content:
+ notifications_received += 1
+ else:
+ requests_received += 1
+ return {"handled": True}
+
+ runner = test_server_runner(sync_acp, free_port)
+ await runner.start()
+
+ async with httpx.AsyncClient() as client:
+ # Send notification (no ID)
+ notification_data = {
+ "jsonrpc": "2.0",
+ "method": "event/send",
+ "params": {
+ "task": {
+ "id": "notification-task",
+ "agent_id": "test-agent",
+ "status": "RUNNING",
+ },
+ "message": {
+ "type": "text",
+ "author": "user",
+ "content": "This is a notification",
+ },
+ },
+ # Note: no "id" field
+ }
+
+ notification_response = await client.post(
+ f"http://127.0.0.1:{free_port}/api", json=notification_data
+ )
+ assert notification_response.status_code == 200
+ notification_result = notification_response.json()
+ assert notification_result["id"] is None
+
+ # Send regular request (with ID)
+ request_data = {
+ "jsonrpc": "2.0",
+ "method": "event/send",
+ "params": {
+ "task": {"id": "request-task", "agent_id": "test-agent", "status": "RUNNING"},
+ "message": {"type": "text", "author": "user", "content": "This is a request"},
+ },
+ "id": "request-1",
+ }
+
+ request_response = await client.post(
+ f"http://127.0.0.1:{free_port}/api", json=request_data
+ )
+ assert request_response.status_code == 200
+ request_result = request_response.json()
+ assert request_result["id"] == "request-1"
+ assert request_result["result"]["status"] == "processing"
+
+ # Give background tasks time to process
+ await asyncio.sleep(0.1)
+
+ assert notifications_received == 1
+ assert requests_received == 1
+
+ await runner.stop()
+
+ @pytest.mark.asyncio()
+ async def test_unicode_message_handling(self, sync_acp, free_port, test_server_runner):
+ """Test handling of unicode characters in messages"""
+ received_message = None
+
+ @sync_acp.on_task_event_send
+ async def unicode_handler(params: SendEventParams):
+ nonlocal received_message
+ received_message = params.message.content
+ return {"unicode_handled": True}
+
+ runner = test_server_runner(sync_acp, free_port)
+ await runner.start()
+
+ unicode_text = "Hello 世界 🌍 émojis 🚀 and special chars: \n\t\r"
+
+ async with httpx.AsyncClient() as client:
+ request_data = {
+ "jsonrpc": "2.0",
+ "method": "event/send",
+ "params": {
+ "task": {
+ "id": "unicode-task",
+ "agent_id": "unicode-agent",
+ "status": "RUNNING",
+ },
+ "message": {"type": "text", "author": "user", "content": unicode_text},
+ },
+ "id": "unicode-test",
+ }
+
+ response = await client.post(f"http://127.0.0.1:{free_port}/api", json=request_data)
+
+ assert response.status_code == 200
+
+ # Give background task time to process
+ await asyncio.sleep(0.1)
+
+ assert received_message == unicode_text
+
+ await runner.stop()
+
+
+class TestImplementationIsolation:
+ """Test that different implementations don't interfere with each other"""
+
+ @pytest.mark.asyncio()
+ async def test_handler_isolation_between_implementations(self):
+ """Test handlers registered on one implementation don't affect others"""
+ with patch.dict("os.environ", {"AGENTEX_BASE_URL": ""}):
+ sync_acp = SyncACP.create()
+ agentic_acp = AgenticBaseACP.create()
+
+ sync_handled = False
+ agentic_handled = False
+
+ @sync_acp.on_task_event_send
+ async def sync_handler(params: SendEventParams):
+ nonlocal sync_handled
+ sync_handled = True
+ return {"sync": True}
+
+ @agentic_acp.on_task_event_send
+ async def agentic_handler(params: SendEventParams):
+ nonlocal agentic_handled
+ agentic_handled = True
+ return {"agentic": True}
+
+ # Create test parameters
+ message_params = SendEventParams(
+ task={"id": "isolation-test-task", "agent_id": "test-agent", "status": "RUNNING"},
+ message={"type": "text", "author": "user", "content": "Isolation test"},
+ )
+
+ # Execute sync handler
+ sync_result = await sync_acp._handlers[RPCMethod.EVENT_SEND](message_params)
+ assert sync_handled is True
+ assert agentic_handled is False
+ assert sync_result == {"sync": True}
+
+ # Reset and execute agentic handler
+ sync_handled = False
+ agentic_result = await agentic_acp._handlers[RPCMethod.EVENT_SEND](message_params)
+ assert sync_handled is False
+ assert agentic_handled is True
+ assert agentic_result == {"agentic": True}
diff --git a/src/agentex/lib/sdk/state_machine/__init__.py b/src/agentex/lib/sdk/state_machine/__init__.py
new file mode 100644
index 000000000..9f247d547
--- /dev/null
+++ b/src/agentex/lib/sdk/state_machine/__init__.py
@@ -0,0 +1,6 @@
+from .noop_workflow import NoOpWorkflow
+from .state import State
+from .state_machine import StateMachine
+from .state_workflow import StateWorkflow
+
+__all__ = ["StateMachine", "StateWorkflow", "State", "NoOpWorkflow"]
diff --git a/src/agentex/lib/sdk/state_machine/noop_workflow.py b/src/agentex/lib/sdk/state_machine/noop_workflow.py
new file mode 100644
index 000000000..aa6fe59a2
--- /dev/null
+++ b/src/agentex/lib/sdk/state_machine/noop_workflow.py
@@ -0,0 +1,21 @@
+from pydantic import BaseModel
+
+from agentex.lib.sdk.state_machine.state_workflow import StateWorkflow
+from agentex.lib.utils.logging import make_logger
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from agentex.lib.sdk.state_machine import StateMachine
+
+logger = make_logger(__name__)
+
+
+class NoOpWorkflow(StateWorkflow):
+ """
+ Workflow that does nothing. This is commonly used as a terminal state.
+ """
+
+ async def execute(
+ self, state_machine: "StateMachine", state_machine_data: BaseModel | None = None
+    ) -> None:
+ pass
diff --git a/src/agentex/lib/sdk/state_machine/state.py b/src/agentex/lib/sdk/state_machine/state.py
new file mode 100644
index 000000000..6ddddc0c0
--- /dev/null
+++ b/src/agentex/lib/sdk/state_machine/state.py
@@ -0,0 +1,10 @@
+from pydantic import BaseModel, ConfigDict
+
+from agentex.lib.sdk.state_machine.state_workflow import StateWorkflow
+
+
+class State(BaseModel):
+ model_config = ConfigDict(arbitrary_types_allowed=True)
+
+ name: str
+ workflow: StateWorkflow
diff --git a/src/agentex/lib/sdk/state_machine/state_machine.py b/src/agentex/lib/sdk/state_machine/state_machine.py
new file mode 100644
index 000000000..7ca54b7bc
--- /dev/null
+++ b/src/agentex/lib/sdk/state_machine/state_machine.py
@@ -0,0 +1,189 @@
+from abc import ABC, abstractmethod
+from typing import Any, Generic, TypeVar
+
+from agentex.lib import adk
+from agentex.lib.sdk.state_machine.state import State
+from agentex.lib.sdk.state_machine.state_workflow import StateWorkflow
+from agentex.lib.utils.model_utils import BaseModel
+
+T = TypeVar("T", bound=BaseModel)
+
+
+class StateMachine(ABC, Generic[T]):
+ def __init__(
+ self,
+ initial_state: str,
+ states: list[State],
+ task_id: str | None = None,
+ state_machine_data: T | None = None,
+ trace_transitions: bool = False,
+ ):
+ self._task_id = task_id
+ self._state_map: dict[str, State] = {state.name: state for state in states}
+ self.state_machine_data = state_machine_data
+ self._initial_state = initial_state
+ self._trace_transitions = trace_transitions
+
+ # Validate that initial state exists
+ if initial_state not in self._state_map:
+ raise ValueError(f"Initial state '{initial_state}' not found in states")
+ self._current_state = self._state_map[initial_state]
+
+ def set_task_id(self, task_id: str):
+ self._task_id = task_id
+
+ def get_current_state(self) -> str:
+ return self._current_state.name
+
+ def get_current_workflow(self) -> StateWorkflow:
+ """
+ Get the workflow of the current state.
+
+ Returns:
+ The workflow of the current state
+
+ Raises:
+ ValueError: If the current state is not found in the state map
+ """
+ current_state = self._state_map.get(self.get_current_state())
+ if not current_state:
+ raise ValueError(f"State {self.get_current_state()} not found")
+ return current_state.workflow
+
+ async def transition(self, target_state_name: str):
+ if not self._state_map.get(target_state_name):
+ raise ValueError(f"State {target_state_name} not found")
+ self._current_state = self._state_map[target_state_name]
+
+ def get_state_machine_data(self) -> T:
+ return self.state_machine_data
+
+ @abstractmethod
+ async def terminal_condition(self) -> bool:
+ pass
+
+ # Overwrite this if you want to add more logic to the state machine
+ async def run(self):
+ while not await self.terminal_condition():
+ await self.step()
+
+ async def step(self) -> str:
+ current_state_name = self.get_current_state()
+ current_state = self._state_map.get(current_state_name)
+
+ if self._trace_transitions:
+ if self._task_id is None:
+ raise ValueError(
+ "Task ID is must be set before tracing can be enabled"
+ )
+ span = await adk.tracing.start_span(
+ trace_id=self._task_id,
+ name="state_transition",
+ input=self.state_machine_data.model_dump(),
+ data={"input_state": current_state_name},
+ )
+
+ next_state_name = await current_state.workflow.execute(
+ state_machine=self, state_machine_data=self.state_machine_data
+ )
+
+ if self._trace_transitions:
+ if self._task_id is None:
+ raise ValueError(
+ "Task ID is must be set before tracing can be enabled"
+ )
+ span.output = self.state_machine_data.model_dump()
+ span.data["output_state"] = next_state_name
+ await adk.tracing.end_span(trace_id=self._task_id, span=span)
+
+ await self.transition(next_state_name)
+
+ return next_state_name
+
+ async def reset_to_initial_state(self):
+ """
+ Reset the state machine to its initial state.
+ """
+ if self._trace_transitions:
+ if self._task_id is None:
+ raise ValueError(
+ "Task ID is must be set before tracing can be enabled"
+ )
+ span = await adk.tracing.start_span(
+ trace_id=self._task_id,
+ name="state_transition_reset",
+ input={"input_state": self.get_current_state()},
+ )
+
+ await self.transition(self._initial_state)
+
+ if self._trace_transitions:
+ span.output = {"output_state": self._initial_state}
+ await adk.tracing.end_span(trace_id=self._task_id, span=span)
+
+ def dump(self) -> dict[str, Any]:
+ """
+ Save the current state of the state machine to a serializable dictionary.
+ This includes the current state, task_id, state machine data, and initial state.
+
+ Returns:
+ Dict[str, Any]: A dictionary containing the serialized state machine state
+ """
+ return {
+ "task_id": self._task_id,
+ "current_state": self.get_current_state(),
+ "initial_state": self._initial_state,
+ "state_machine_data": self.state_machine_data.model_dump(mode="json")
+ if self.state_machine_data
+ else None,
+ "trace_transitions": self._trace_transitions,
+ }
+
+ @classmethod
+ async def load(cls, data: dict[str, Any], states: list[State]) -> "StateMachine[T]":
+ """
+ Load a state machine from a previously saved dictionary.
+
+ Args:
+ data: The dictionary containing the saved state machine state
+ states: List of all possible states
+
+ Returns:
+ StateMachine: A new state machine instance restored to the saved state
+
+ Raises:
+ ValueError: If the data is invalid or missing required fields
+ """
+ try:
+ task_id = data.get("task_id")
+ current_state_name = data.get("current_state")
+ initial_state = data.get("initial_state")
+ state_machine_data_dict = data.get("state_machine_data")
+ trace_transitions = data.get("trace_transitions")
+
+ if initial_state is None:
+ raise ValueError("Initial state not found in saved data")
+
+ # Reconstruct the state machine data into its Pydantic model
+ state_machine_data = None
+ if state_machine_data_dict is not None:
+ # Get the actual model type from the class's type parameters
+ model_type = cls.__orig_bases__[0].__args__[0]
+ state_machine_data = model_type.model_validate(state_machine_data_dict)
+
+ # Create a new instance
+ instance = cls(
+ initial_state=initial_state,
+ states=states,
+ task_id=task_id,
+ state_machine_data=state_machine_data,
+ trace_transitions=trace_transitions,
+ )
+
+ # If there's a saved state, transition to it
+ if current_state_name:
+ await instance.transition(target_state_name=current_state_name)
+
+ return instance
+ except Exception as e:
+ raise ValueError(f"Failed to restore state machine: {str(e)}") from e
diff --git a/src/agentex/lib/sdk/state_machine/state_workflow.py b/src/agentex/lib/sdk/state_machine/state_workflow.py
new file mode 100644
index 000000000..403d23cd9
--- /dev/null
+++ b/src/agentex/lib/sdk/state_machine/state_workflow.py
@@ -0,0 +1,16 @@
+from abc import ABC, abstractmethod
+from typing import TYPE_CHECKING
+
+from pydantic import BaseModel
+
+# Import StateMachine only for type checking to avoid circular imports
+if TYPE_CHECKING:
+ from agentex.lib.sdk.state_machine import StateMachine
+
+
+class StateWorkflow(ABC):
+    """Interface for the work executed while a StateMachine is in one state."""
+
+    @abstractmethod
+    async def execute(
+        self, state_machine: "StateMachine", state_machine_data: BaseModel | None = None
+    ) -> str:
+        """Run this state's work and return the name of the next state."""
+        pass
diff --git a/src/agentex/lib/sdk/utils/__init__.py b/src/agentex/lib/sdk/utils/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/agentex/lib/sdk/utils/messages.py b/src/agentex/lib/sdk/utils/messages.py
new file mode 100644
index 000000000..98dd7cb95
--- /dev/null
+++ b/src/agentex/lib/sdk/utils/messages.py
@@ -0,0 +1,223 @@
+import json
+from abc import ABC, abstractmethod
+from typing import Any, Literal, override
+
+from agentex.lib.types.llm_messages import (
+ AssistantMessage,
+ Message,
+ ToolCall,
+ ToolCallRequest,
+ ToolMessage,
+ UserMessage,
+)
+from agentex.types.data_content import DataContent
+from agentex.types.task_message import TaskMessage
+from agentex.types.text_content import TextContent
+from agentex.types.tool_request_content import ToolRequestContent
+from agentex.types.tool_response_content import ToolResponseContent
+
+
+class TaskMessageConverter(ABC):
+    """
+    Abstract base class for converting a specific type of TaskMessage to an LLM Message.
+
+    Each converter should be responsible for one content type.
+    """
+
+    @abstractmethod
+    def convert(self, task_message: TaskMessage) -> Message:
+        """
+        Convert a TaskMessage to an LLM Message.
+
+        Args:
+            task_message: The TaskMessage to convert
+
+        Returns:
+            A Message (Pydantic model)
+
+        Raises:
+            ValueError: Implementations raise this when the message's content
+                type does not match the converter.
+        """
+        pass
+
+
+class DefaultTextContentConverter(TaskMessageConverter):
+ """Converter for TEXT content type."""
+
+ @override
+ def convert(self, task_message: TaskMessage) -> Message:
+ """Convert TEXT content to UserMessage or AssistantMessage based on author."""
+ if not isinstance(task_message.content, TextContent):
+ raise ValueError(f"Expected TextContent, got {type(task_message.content)}")
+ content = task_message.content
+ if content.author == "user":
+ return UserMessage(content=content.content)
+ else: # AGENT or custom author
+ return AssistantMessage(content=content.content)
+
+
+class DefaultToolRequestConverter(TaskMessageConverter):
+ """Converter for TOOL_REQUEST content type."""
+
+ @override
+ def convert(self, task_message: TaskMessage) -> Message:
+ """Convert TOOL_REQUEST content to AssistantMessage with tool_calls."""
+ if not isinstance(task_message.content, ToolRequestContent):
+ raise ValueError(f"Expected ToolRequestContent, got {type(task_message.content)}")
+
+ content = task_message.content
+
+ # Ensure arguments are properly JSON serialized
+ arguments_str = json.dumps(content.arguments)
+
+ tool_call = ToolCallRequest(
+ id=content.tool_call_id,
+ function=ToolCall(name=content.name, arguments=arguments_str),
+ )
+ return AssistantMessage(content=None, tool_calls=[tool_call])
+
+
+class DefaultToolResponseConverter(TaskMessageConverter):
+ """Converter for TOOL_RESPONSE content type."""
+
+ @override
+ def convert(self, task_message: TaskMessage) -> Message:
+ """Convert TOOL_RESPONSE content to ToolMessage."""
+ if not isinstance(task_message.content, ToolResponseContent):
+ raise ValueError(f"Expected ToolResponseContent, got {type(task_message.content)}")
+
+ content = task_message.content
+ return ToolMessage(
+ content=str(content.content),
+ tool_call_id=content.tool_call_id,
+ name=content.name,
+ )
+
+
+class DefaultDataContentConverter(TaskMessageConverter):
+ """Converter for DATA content type."""
+
+ @override
+ def convert(self, task_message: TaskMessage) -> Message:
+ """Convert DATA content to UserMessage or AssistantMessage based on author."""
+ if not isinstance(task_message.content, DataContent):
+ raise ValueError(f"Expected DataContent, got {type(task_message.content)}")
+
+ content = task_message.content
+ content_str = str(content.data)
+ if content.author == "user":
+ return UserMessage(content=content_str)
+ else: # AGENT or custom author
+ return AssistantMessage(content=content_str)
+
+
+class DefaultUnknownContentConverter(TaskMessageConverter):
+ """Converter for unknown content types."""
+
+ @override
+ def convert(self, task_message: TaskMessage) -> Message:
+ """Convert unknown content types to AssistantMessage with fallback text."""
+
+ content = task_message.content
+ fallback_content = f"Unknown message type: {content.type}"
+ return AssistantMessage(content=fallback_content)
+
+
+def convert_task_message_to_llm_messages(
+ task_message: TaskMessage,
+ output_mode: Literal["pydantic", "dict"] = "pydantic",
+ text_converter: TaskMessageConverter | None = None,
+ tool_request_converter: TaskMessageConverter | None = None,
+ tool_response_converter: TaskMessageConverter | None = None,
+ data_converter: TaskMessageConverter | None = None,
+ unknown_converter: TaskMessageConverter | None = None,
+) -> Message | dict[str, Any]:
+ """
+ Convert a TaskMessage to an LLM Message format.
+
+ Args:
+ task_message: The TaskMessage to convert
+ output_mode: Whether to return a Pydantic model or dict
+ text_converter: Optional converter for TEXT content. Uses DefaultTextContentConverter if None.
+ tool_request_converter: Optional converter for TOOL_REQUEST content. Uses DefaultToolRequestConverter if None.
+ tool_response_converter: Optional converter for TOOL_RESPONSE content. Uses DefaultToolResponseConverter if None.
+ data_converter: Optional converter for DATA content. Uses DefaultDataContentConverter if None.
+ unknown_converter: Optional converter for unknown content. Uses DefaultUnknownContentConverter if None.
+
+ Returns:
+ Either a Message (Pydantic model) or dict representation
+ """
+ content = task_message.content
+
+ # Get the appropriate converter for this content type
+ if content.type == "text":
+ converter = (
+ text_converter
+ if text_converter is not None
+ else DefaultTextContentConverter()
+ )
+ elif content.type == "tool_request":
+ converter = (
+ tool_request_converter
+ if tool_request_converter is not None
+ else DefaultToolRequestConverter()
+ )
+ elif content.type == "tool_response":
+ converter = (
+ tool_response_converter
+ if tool_response_converter is not None
+ else DefaultToolResponseConverter()
+ )
+ elif content.type == "data":
+ converter = (
+ data_converter
+ if data_converter is not None
+ else DefaultDataContentConverter()
+ )
+ else:
+ converter = (
+ unknown_converter
+ if unknown_converter is not None
+ else DefaultUnknownContentConverter()
+ )
+
+ message = converter.convert(task_message)
+
+ if output_mode == "dict":
+ return message.model_dump()
+ return message
+
+
+def convert_task_messages_to_llm_messages(
+ task_messages: list[TaskMessage],
+ output_mode: Literal["pydantic", "dict"] = "pydantic",
+ text_converter: TaskMessageConverter | None = None,
+ tool_request_converter: TaskMessageConverter | None = None,
+ tool_response_converter: TaskMessageConverter | None = None,
+ data_converter: TaskMessageConverter | None = None,
+ unknown_converter: TaskMessageConverter | None = None,
+) -> list[Message | dict[str, Any]]:
+ """
+ Convert a list of TaskMessages to LLM Message format.
+
+ Args:
+ task_messages: List of TaskMessages to convert
+ output_mode: Whether to return Pydantic models or dicts
+ text_converter: Optional converter for TEXT content. Uses DefaultTextContentConverter if None.
+ tool_request_converter: Optional converter for TOOL_REQUEST content. Uses DefaultToolRequestConverter if None.
+ tool_response_converter: Optional converter for TOOL_RESPONSE content. Uses DefaultToolResponseConverter if None.
+ data_converter: Optional converter for DATA content. Uses DefaultDataContentConverter if None.
+ unknown_converter: Optional converter for unknown content. Uses DefaultUnknownContentConverter if None.
+
+ Returns:
+ List of either Messages (Pydantic models) or dicts
+ """
+ return [
+ convert_task_message_to_llm_messages(
+ task_message,
+ output_mode,
+ text_converter,
+ tool_request_converter,
+ tool_response_converter,
+ data_converter,
+ unknown_converter,
+ )
+ for task_message in task_messages
+ ]
diff --git a/src/agentex/lib/types/__init__.py b/src/agentex/lib/types/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/agentex/lib/types/acp.py b/src/agentex/lib/types/acp.py
new file mode 100644
index 000000000..4ec008c87
--- /dev/null
+++ b/src/agentex/lib/types/acp.py
@@ -0,0 +1,94 @@
+from enum import Enum
+from typing import Any
+
+from pydantic import BaseModel, Field
+
+from agentex.types.agent import Agent
+from agentex.types.event import Event
+from agentex.types.task_message_content import TaskMessageContent
+from agentex.types.task import Task
+
+
+class RPCMethod(str, Enum):
+    """Available JSON-RPC methods for agent communication."""
+
+    # Values are the JSON-RPC "method" strings sent over the wire.
+    EVENT_SEND = "event/send"
+    MESSAGE_SEND = "message/send"
+    TASK_CANCEL = "task/cancel"
+    TASK_CREATE = "task/create"
+
+
+class CreateTaskParams(BaseModel):
+    """Parameters for task/create method.
+
+    Attributes:
+        agent: The agent that the task was sent to.
+        task: The task to be created.
+        params: The parameters for the task as inputted by the user.
+    """
+
+    agent: Agent = Field(..., description="The agent that the task was sent to")
+    task: Task = Field(..., description="The task to be created")
+    # Free-form payload; not validated here beyond being a dict (or absent).
+    params: dict[str, Any] | None = Field(
+        None,
+        description="The parameters for the task as inputted by the user",
+    )
+
+
+class SendMessageParams(BaseModel):
+    """Parameters for message/send method.
+
+    Attributes:
+        agent: The agent that the message was sent to.
+        task: The task that the message was sent to.
+        content: The message that was sent to the agent.
+        stream: Whether to stream the message back to the agentex server from the agent.
+    """
+
+    agent: Agent = Field(..., description="The agent that the message was sent to")
+    task: Task = Field(..., description="The task that the message was sent to")
+    content: TaskMessageContent = Field(
+        ..., description="The message that was sent to the agent"
+    )
+    # Non-streaming by default.
+    stream: bool = Field(
+        False,
+        description="Whether to stream the message back to the agentex server from the agent",
+    )
+
+
+class SendEventParams(BaseModel):
+    """Parameters for event/send method.
+
+    Attributes:
+        agent: The agent that the event was sent to.
+        task: The task that the event was sent to.
+        event: The event that was sent to the agent.
+    """
+
+    agent: Agent = Field(..., description="The agent that the event was sent to")
+    task: Task = Field(..., description="The task that the message was sent to")
+    event: Event = Field(..., description="The event that was sent to the agent")
+
+
+class CancelTaskParams(BaseModel):
+    """Parameters for task/cancel method.
+
+    Attributes:
+        agent: The agent that the task was sent to.
+        task: The task that was cancelled.
+    """
+
+    agent: Agent = Field(..., description="The agent that the task was sent to")
+    task: Task = Field(..., description="The task that was cancelled")
+
+
+# Methods handled synchronously (request gets a direct response).
+# NOTE(review): inferred from the name only — confirm against the RPC handler.
+RPC_SYNC_METHODS = [
+    RPCMethod.MESSAGE_SEND,
+]
+
+# Maps each RPC method to the Pydantic model used to validate its params.
+PARAMS_MODEL_BY_METHOD: dict[RPCMethod, type[BaseModel]] = {
+    RPCMethod.EVENT_SEND: SendEventParams,
+    RPCMethod.TASK_CANCEL: CancelTaskParams,
+    RPCMethod.MESSAGE_SEND: SendMessageParams,
+    RPCMethod.TASK_CREATE: CreateTaskParams,
+}
diff --git a/src/agentex/lib/types/agent_configs.py b/src/agentex/lib/types/agent_configs.py
new file mode 100644
index 000000000..8d9548a84
--- /dev/null
+++ b/src/agentex/lib/types/agent_configs.py
@@ -0,0 +1,79 @@
+from pydantic import BaseModel, Field, field_validator, model_validator, validator
+
+
+class TemporalWorkflowConfig(BaseModel):
+    """
+    Configuration for the temporal workflow that defines the agent.
+
+    Attributes:
+        name: The name of the temporal workflow that defines the agent.
+        queue_name: The name of the temporal queue to send tasks to.
+    """
+
+    # NOTE(review): presumably must match the workflow name registered by the
+    # Temporal worker — confirm against the worker registration code.
+    name: str = Field(
+        ..., description="The name of the temporal workflow that defines the agent."
+    )
+    queue_name: str = Field(
+        ..., description="The name of the temporal queue to send tasks to."
+    )
+
+
+# TODO: Remove this class when we remove the agentex agents create
+class TemporalWorkerConfig(BaseModel):
+    """
+    Configuration for temporal worker deployment.
+
+    Deprecated: slated for removal along with `agentex agents create`
+    (see TODO above this class).
+
+    Attributes:
+        image: The image to use for the temporal worker
+        workflow: The temporal workflow configuration
+    """
+
+    image: str | None = Field(
+        default=None, description="Image to use for the temporal worker"
+    )
+    workflow: TemporalWorkflowConfig | None = Field(
+        default=None,
+        description="Configuration for the temporal workflow that defines the agent. Only required for agents that leverage Temporal.",
+    )
+
+
+class TemporalConfig(BaseModel):
+ """
+ Simplified temporal configuration for agents
+
+ Attributes:
+ enabled: Whether this agent uses Temporal workflows
+ workflow: The temporal workflow configuration
+ workflows: The list of temporal workflow configurations
+ """
+
+ enabled: bool = Field(
+ default=False, description="Whether this agent uses Temporal workflows"
+ )
+ workflow: TemporalWorkflowConfig | None = Field(
+ default=None,
+ description="Temporal workflow configuration. Required when enabled=True. (deprecated: use workflows instead)",
+ )
+ workflows: list[TemporalWorkflowConfig] | None = Field(
+ default=None,
+ description="List of temporal workflow configurations. Used when enabled=true.",
+ )
+
+ @validator("workflows")
+ def validate_workflows_not_empty(cls, v):
+ """Ensure workflows list is not empty when provided"""
+ if v is not None and len(v) == 0:
+ raise ValueError("workflows list cannot be empty when provided")
+ return v
+
+ @model_validator(mode="after")
+ def validate_temporal_config_when_enabled(self):
+ """Validate that workflow configuration exists when enabled=true"""
+ if self.enabled:
+ # Must have either workflow (legacy) or workflows (new)
+ if not self.workflow and (not self.workflows or len(self.workflows) == 0):
+ raise ValueError(
+ "When temporal.enabled=true, either 'workflow' or 'workflows' must be provided and non-empty"
+ )
+
+ return self
diff --git a/src/agentex/lib/types/agent_results.py b/src/agentex/lib/types/agent_results.py
new file mode 100644
index 000000000..9b3703e0c
--- /dev/null
+++ b/src/agentex/lib/types/agent_results.py
@@ -0,0 +1,29 @@
+from typing import Any
+
+from pydantic import BaseModel
+
+
+class SerializableRunResult(BaseModel):
+    """
+    Serializable version of RunResult.
+
+    Attributes:
+        final_output: The final output of the run.
+        final_input_list: The final input list of the run.
+    """
+
+    final_output: Any
+    final_input_list: list[dict[str, Any]]
+
+
+class SerializableRunResultStreaming(BaseModel):
+    """
+    Serializable version of RunResultStreaming.
+
+    Attributes:
+        final_output: The final output of the run.
+        final_input_list: The final input list of the run.
+    """
+
+    # NOTE(review): identical shape to SerializableRunResult; kept as a
+    # separate class, presumably to mirror the two distinct source types.
+    final_output: Any
+    final_input_list: list[dict[str, Any]]
diff --git a/src/agentex/lib/types/credentials.py b/src/agentex/lib/types/credentials.py
new file mode 100644
index 000000000..719165128
--- /dev/null
+++ b/src/agentex/lib/types/credentials.py
@@ -0,0 +1,34 @@
+from pydantic import BaseModel, Field
+
+
+class CredentialMapping(BaseModel):
+    """Maps a Kubernetes secret to an environment variable in the agent container.
+
+    This allows agents to securely access credentials stored in Kubernetes secrets
+    by mapping them to environment variables. For example, you can map a secret
+    containing an API key to an environment variable that your agent code expects.
+
+    Example:
+        A mapping of {"env_var_name": "OPENAI_API_KEY",
+                      "secret_name": "ai-credentials",
+                      "secret_key": "openai-key"}
+        will make the value from the "openai-key" field in the "ai-credentials"
+        Kubernetes secret available to the agent as OPENAI_API_KEY environment variable.
+
+    Attributes:
+        env_var_name: The name of the environment variable that will be available to the agent
+        secret_name: The name of the Kubernetes secret containing the credential
+        secret_key: The key within the Kubernetes secret that contains the credential value
+    """
+
+    # NOTE(review): no validation against the cluster happens here — resolution
+    # presumably occurs at deploy time; confirm in the deployment code.
+    env_var_name: str = Field(
+        ...,
+        description="Name of the environment variable that will be available to the agent",
+    )
+    secret_name: str = Field(
+        ..., description="Name of the Kubernetes secret containing the credential"
+    )
+    secret_key: str = Field(
+        ...,
+        description="Key within the Kubernetes secret that contains the credential value",
+    )
diff --git a/src/agentex/lib/types/fastacp.py b/src/agentex/lib/types/fastacp.py
new file mode 100644
index 000000000..743e7e4a4
--- /dev/null
+++ b/src/agentex/lib/types/fastacp.py
@@ -0,0 +1,61 @@
+from typing import Literal
+
+from pydantic import BaseModel, Field
+
+
+class BaseACPConfig(BaseModel):
+    """
+    Base configuration for all ACP implementations.
+
+    Currently a marker base class with no fields of its own; subclasses that
+    need a discriminator declare their own `type` field.
+    """
+
+    pass
+
+
+class SyncACPConfig(BaseACPConfig):
+    """
+    Configuration for SyncACP implementation.
+
+    Currently adds no fields beyond BaseACPConfig; exists so sync ACP config
+    has a distinct type.
+    """
+
+    pass
+
+
+class AgenticACPConfig(BaseACPConfig):
+    """
+    Base class for agentic ACP configurations
+
+    Attributes:
+        type: The type of ACP implementation
+    """
+
+    # Discriminator; narrowed to a single literal by each subclass. Frozen so
+    # it cannot be mutated after construction.
+    type: Literal["temporal", "base"] = Field(..., frozen=True)
+
+
+class TemporalACPConfig(AgenticACPConfig):
+    """
+    Configuration for TemporalACP implementation
+
+    Attributes:
+        type: The type of ACP implementation
+        temporal_address: The address of the temporal server
+    """
+
+    type: Literal["temporal"] = Field(default="temporal", frozen=True)
+    # Default targets the in-cluster Temporal frontend service (*.svc.cluster.local).
+    temporal_address: str = Field(
+        default="temporal-frontend.temporal.svc.cluster.local:7233", frozen=True
+    )
+
+
+class AgenticBaseACPConfig(AgenticACPConfig):
+    """Configuration for AgenticBaseACP implementation
+
+    Attributes:
+        type: The type of ACP implementation
+    """
+
+    # Non-Temporal agentic ACP; only the discriminator distinguishes it.
+    type: Literal["base"] = Field(default="base", frozen=True)
diff --git a/src/agentex/lib/types/files.py b/src/agentex/lib/types/files.py
new file mode 100644
index 000000000..ddf104dd2
--- /dev/null
+++ b/src/agentex/lib/types/files.py
@@ -0,0 +1,13 @@
+from agentex.lib.utils.model_utils import BaseModel
+
+
+class FileContentResponse(BaseModel):
+    """Response model for downloaded file content.
+
+    Attributes:
+        mime_type: The MIME type of the file
+        base64_content: The base64 encoded content of the file
+    """
+
+    mime_type: str  # e.g. "application/pdf"
+    base64_content: str  # Raw file bytes, base64-encoded
diff --git a/src/agentex/lib/types/json_rpc.py b/src/agentex/lib/types/json_rpc.py
new file mode 100644
index 000000000..15db316d8
--- /dev/null
+++ b/src/agentex/lib/types/json_rpc.py
@@ -0,0 +1,49 @@
+from typing import Any, Literal
+
+from agentex.lib.utils.model_utils import BaseModel
+
+
+class JSONRPCError(BaseModel):
+    """JSON-RPC 2.0 Error
+
+    Attributes:
+        code: The error code
+        message: The error message
+        data: The error data
+    """
+
+    code: int  # JSON-RPC 2.0 error code (e.g. -32600 Invalid Request)
+    message: str
+    data: Any | None = None  # Optional additional error information
+
+
+class JSONRPCRequest(BaseModel):
+    """JSON-RPC 2.0 Request
+
+    Attributes:
+        jsonrpc: The JSON-RPC version
+        method: The method to call
+        params: The parameters for the request
+        id: The ID of the request
+    """
+
+    jsonrpc: Literal["2.0"] = "2.0"
+    method: str
+    params: dict[str, Any]
+    # Per JSON-RPC 2.0, a request without an id is a notification
+    # (no response expected).
+    id: int | str | None = None
+
+
+class JSONRPCResponse(BaseModel):
+    """JSON-RPC 2.0 Response
+
+    Attributes:
+        jsonrpc: The JSON-RPC version
+        result: The result of the request
+        error: The error of the request
+    """
+
+    jsonrpc: Literal["2.0"] = "2.0"
+    # Per JSON-RPC 2.0 exactly one of result/error should be set;
+    # this model does not enforce that invariant.
+    result: dict[str, Any] | None = None
+    error: JSONRPCError | None = None
+    id: int | str | None = None
diff --git a/src/agentex/lib/types/llm_messages.py b/src/agentex/lib/types/llm_messages.py
new file mode 100644
index 000000000..fa23a047e
--- /dev/null
+++ b/src/agentex/lib/types/llm_messages.py
@@ -0,0 +1,354 @@
+from typing import Any, Literal
+
+try:
+ from typing import Annotated
+except ImportError:
+ from typing import Annotated
+from pydantic import Field
+
+from agentex.lib.utils.model_utils import BaseModel
+
+
+class LLMConfig(BaseModel):
+    """
+    LLMConfig is the configuration for the LLM.
+
+    Attributes:
+        model: The model to use
+        messages: The messages to send to the LLM
+        temperature: The temperature to use
+        top_p: The top_p to use
+        n: The number of completions to generate
+        stream: Whether to stream the completions
+        stream_options: The options for the stream
+        stop: The stop sequence to use
+        max_tokens: The maximum number of tokens to generate
+        max_completion_tokens: The maximum number of tokens to generate for the completion
+        presence_penalty: The presence penalty to use
+        frequency_penalty: The frequency penalty to use
+        logit_bias: The logit bias to use
+        response_format: The response format to use
+        seed: The seed to use
+        tools: The tools to use
+        tool_choice: The tool choice to use
+        parallel_tool_calls: Whether to allow parallel tool calls
+        logprobs: Whether to return log probabilities
+        top_logprobs: The number of top log probabilities to return
+    """
+
+    model: str
+    # Pydantic deep-copies mutable defaults per instance, so [] is safe here.
+    messages: list = []
+    temperature: float | None = None
+    top_p: float | None = None
+    n: int | None = None
+    stream: bool | None = None
+    stream_options: dict | None = None
+    stop: str | list | None = None
+    max_tokens: int | None = None
+    max_completion_tokens: int | None = None
+    presence_penalty: float | None = None
+    frequency_penalty: float | None = None
+    logit_bias: dict | None = None
+    # Accepts an OpenAI-style response_format dict, a Pydantic model class
+    # (structured output), or a plain string.
+    response_format: dict | type[BaseModel] | str | None = None
+    seed: int | None = None
+    tools: list | None = None
+    tool_choice: str | None = None
+    parallel_tool_calls: bool | None = None
+    logprobs: bool | None = None
+    top_logprobs: int | None = None
+
+
+class ContentPartText(BaseModel):
+    """
+    ContentPartText is the text content of the message.
+
+    Attributes:
+        text: The text content.
+        type: The type of the content part.
+    """
+
+    text: str = Field(..., description="The text content.")
+    # Discriminator for the ContentPart union; defaults to its only value.
+    type: Literal["text"] = Field(
+        default="text", description="The type of the content part."
+    )
+
+
+class ImageURL(BaseModel):
+    """
+    ImageURL is the URL of the image.
+
+    Attributes:
+        url: The URL of the image.
+        detail: The detail level of the image.
+    """
+
+    url: str = Field(
+        ..., description="Either a URL of the image or the base64 encoded image data."
+    )
+    # NOTE(review): required here, while the OpenAI API defaults detail to
+    # "auto" — confirm requiring it is intentional.
+    detail: Literal["auto", "low", "high"] = Field(
+        ...,
+        description="""Specifies the detail level of the image.
+
+Learn more in the
+[Vision guide](https://platform.openai.com/docs/guides/vision/low-or-high-fidelity-image-understanding).
+""",
+    )
+
+
+class ContentPartImage(BaseModel):
+ """
+ ContentPartImage is the image content of the message.
+
+ Attributes:
+ image_url: The URL of the image.
+ type: The type of the content part.
+ """
+
+ image_url: ImageURL = Field(..., description="The image URL.")
+ type: Literal["image_url"] = Field(..., description="The type of the content part.")
+
+
+class FileContent(BaseModel):
+    """
+    FileContent is the file content of the message.
+
+    Attributes:
+        filename: The name of the file.
+        file_data: The base64 encoded file data with MIME type, e.g., 'data:application/pdf;base64,...'
+    """
+
+    filename: str = Field(..., description="The name of the file.")
+    # Data-URI style payload: "data:<mime>;base64,<bytes>".
+    file_data: str = Field(
+        ...,
+        description="The base64 encoded file data with MIME type, e.g., 'data:application/pdf;base64,...'",
+    )
+
+
+class ContentPartFile(BaseModel):
+    """
+    ContentPartFile is the file content of the message.
+
+    Attributes:
+        file: The file content.
+        type: The type of the content part.
+    """
+
+    file: FileContent = Field(..., description="The file content.")
+    # Discriminator for the ContentPart union; defaults to its only value.
+    type: Literal["file"] = Field(
+        default="file", description="The type of the content part."
+    )
+
+
+# Union of the supported multimodal content parts used in message content lists.
+ContentPart = ContentPartText | ContentPartImage | ContentPartFile
+
+
+class SystemMessage(BaseModel):
+    """
+    SystemMessage is the system message of the message.
+
+    Attributes:
+        role: The role of the messages author, in this case `system`.
+        content: The contents of the system message.
+    """
+
+    role: Literal["system"] = Field(
+        default="system",
+        description="The role of the messages author, in this case `system`.",
+    )
+    # Plain string only — unlike UserMessage, no content-part list is accepted.
+    content: str = Field(..., description="The contents of the system message.")
+
+
+class UserMessage(BaseModel):
+    """
+    UserMessage is the user message of the message.
+
+    Attributes:
+        role: The role of the messages author, in this case `user`.
+        content: The contents of the user message.
+    """
+
+    role: Literal["user"] = Field(
+        default="user",
+        description="The role of the messages author, in this case `user`.",
+    )
+    # Either a plain string or a multimodal list of ContentPart items.
+    content: str | list[ContentPart] = Field(
+        ...,
+        description="The contents of the user message. Can be a string or a list of content parts.",
+    )
+
+
+class ToolCall(BaseModel):
+    """
+    ToolCall is the tool call of the message.
+
+    Attributes:
+        name: The name of the function to call.
+        arguments: The arguments to call the function with, as generated by the model in JSON format.
+    """
+
+    # Both fields optional — NOTE(review): presumably to tolerate partial
+    # streamed tool-call deltas; confirm against the streaming handler.
+    name: str | None = Field(
+        default=None, description="The name of the function to call."
+    )
+    arguments: str | None = Field(
+        default=None,
+        description="""
+The arguments to call the function with, as generated by the model in JSON
+format. Note that the model does not always generate valid JSON, and may
+hallucinate parameters not defined by your function schema. Validate the
+arguments in your code before calling your function.
+""",
+    )
+
+
+class ToolCallRequest(BaseModel):
+    """
+    ToolCallRequest is the tool call request of the message.
+
+    Attributes:
+        type: The type of the tool. Currently, only `function` is supported.
+        id: The ID of the tool call request.
+        function: The function that the model is requesting.
+        index: The index of the tool call request.
+    """
+
+    type: Literal["function"] = Field(
+        default="function",
+        description="The type of the tool. Currently, only `function` is supported.",
+    )
+    id: str | None = Field(default=None, description="The ID of the tool call request.")
+    function: ToolCall = Field(
+        ..., description="The function that the model is requesting."
+    )
+    # Position within the tool_calls array — NOTE(review): appears to exist for
+    # streamed responses; confirm against the provider API.
+    index: int | None = None
+
+
+class AssistantMessage(BaseModel):
+ """
+ AssistantMessage is the assistant message of the message.
+
+ Attributes:
+ role: The role of the messages author, in this case `assistant`.
+ content: The contents of the assistant message.
+ tool_calls: The tool calls generated by the model, such as function calls.
+        parsed: The content of the message parsed into a specific type
+ """
+
+ role: Literal["assistant"] = Field(
+ default="assistant",
+ description="The role of the messages author, in this case `assistant`.",
+ )
+ content: str | None = Field(
+ default=None,
+ description="""The contents of the assistant message.
+
+Required unless `tool_calls` or `function_call` is specified.
+""",
+ )
+ tool_calls: list[ToolCallRequest] | None = Field(
+ default=None,
+ description="The tool calls generated by the model, such as function calls.",
+ )
+ parsed: Any | None = Field(
+        default=None, description="The content of the message parsed into a specific type"
+ )
+
+
+class ToolMessage(BaseModel):
+ """
+ ToolMessage is the tool message of the message.
+
+ Attributes:
+ role: The role of the messages author, in this case `tool`.
+ content: The contents of the tool message.
+ tool_call_id: The tool call that this message is responding to.
+ name: The name of the tool called.
+        is_error: Whether the tool call resulted in an error.
+ """
+
+ role: Literal["tool"] = Field(
+ default="tool",
+ description="The role of the messages author, in this case `tool`.",
+ )
+ content: str | list[ContentPart] = Field(
+ ..., description="The contents of the tool message."
+ )
+ tool_call_id: str = Field(
+ ..., description="Tool call that this message is responding to."
+ )
+ # name is optional based on OAI API defined here for chat_completion_input: https://platform.openai.com/docs/api-reference/chat/create
+ name: str | None = Field(default=None, description="The name of the tool called.")
+ is_error: bool | None = Field(
+        default=None, description="Whether the tool call resulted in an error."
+ )
+
+
+Message = Annotated[
+ SystemMessage | UserMessage | AssistantMessage | ToolMessage,
+ Field(discriminator="role"),
+]
+
+
+class Delta(BaseModel):
+ """
+ Delta is the delta of the message.
+
+ Attributes:
+ content: The content of the delta.
+ role: The role of the delta.
+ tool_calls: The tool calls of the delta.
+ """
+
+ content: str | None = Field(default=None)
+ role: str | None = Field(default=None)
+ tool_calls: list[ToolCallRequest] | None = Field(default=None)
+
+
+class Choice(BaseModel):
+ """
+ Choice is the choice of the message.
+
+ Attributes:
+ index: The index of the choice.
+ finish_reason: The finish reason of the choice.
+ message: The message of the choice.
+ delta: The delta of the choice.
+ """
+
+ index: int
+ finish_reason: Literal["stop", "length", "content_filter", "tool_calls"] | None = (
+ None
+ )
+ message: AssistantMessage | None = None
+ delta: Delta | None = None
+
+
+class Usage(BaseModel):
+ """
+ Usage is the usage of the message.
+
+ Attributes:
+ prompt_tokens: The number of prompt tokens.
+ completion_tokens: The number of completion tokens.
+ total_tokens: The total number of tokens.
+ """
+
+ prompt_tokens: int
+ completion_tokens: int
+ total_tokens: int
+
+
+class Completion(BaseModel):
+ """
+ Completion is the completion of the message.
+
+ Attributes:
+ choices: The choices of the completion.
+ created: The created time of the completion.
+ model: The model of the completion.
+ usage: The usage of the completion.
+ """
+
+ choices: list[Choice]
+ created: int | None = None
+ model: str | None = None
+ usage: Usage | None = None
diff --git a/src/agentex/lib/types/task_message_updates.py b/src/agentex/lib/types/task_message_updates.py
new file mode 100644
index 000000000..0030acde9
--- /dev/null
+++ b/src/agentex/lib/types/task_message_updates.py
@@ -0,0 +1,171 @@
+from enum import Enum
+from typing import Annotated, Literal
+
+from pydantic import Field, model_validator
+
+from agentex.types.task_message import TaskMessage
+from agentex.types.task_message_content import TaskMessageContent
+from agentex.lib.utils.model_utils import BaseModel
+
+
+class BaseTaskMessageDelta(BaseModel):
+ """
+ Base class for all delta updates
+
+ Attributes:
+ type: The type of delta update
+ """
+
+ type: Literal["text", "data", "tool_request", "tool_response"]
+
+
+class TextDelta(BaseTaskMessageDelta):
+ """
+ Delta for text updates
+
+ Attributes:
+ type: The type of delta update
+ text_delta: The delta for the text
+ """
+
+ type: Literal["text"] = "text"
+ text_delta: str | None = ""
+
+
+class DataDelta(BaseTaskMessageDelta):
+ """
+ Delta for data updates
+
+ Attributes:
+ type: The type of delta update
+ data_delta: The delta for the data
+ """
+
+ type: Literal["data"] = "data"
+ data_delta: str | None = ""
+
+
+class ToolRequestDelta(BaseTaskMessageDelta):
+ """
+ Delta for tool request updates
+
+ Attributes:
+ type: The type of delta update
+ name: The name of the tool
+ arguments_delta: The delta for the arguments
+ """
+
+ type: Literal["tool_request"] = "tool_request"
+ tool_call_id: str
+ name: str
+ arguments_delta: str | None = ""
+
+
+class ToolResponseDelta(BaseTaskMessageDelta):
+ """
+ Delta for tool response updates
+
+ Attributes:
+ type: The type of delta update
+ tool_response_delta: The delta for the tool response
+ """
+
+ type: Literal["tool_response"] = "tool_response"
+ tool_call_id: str
+ name: str
+ tool_response_delta: str | None = ""
+
+
+TaskMessageDelta = Annotated[
+ TextDelta | DataDelta | ToolRequestDelta | ToolResponseDelta,
+ Field(discriminator="type"),
+]
+
+
+class StreamTaskMessage(BaseModel):
+ """Base class for all task message stream events
+
+ Attributes:
+ type: The type of task message update
+ parent_task_message: The parent task message
+ index: The index of the task message
+ """
+
+ type: Literal["start", "delta", "full", "done"]
+ # Used for streaming chunks to a direct parent_task_message
+ parent_task_message: TaskMessage | None = None
+ # Used to correlate chunks of different task messages with each other
+ # directly in the Sync ACP case
+ index: int | None = None
+
+ @model_validator(mode="after")
+ def validate_message_correlation(self):
+ """Ensure exactly one of index or parent_task_message is set"""
+ has_parent = self.parent_task_message is not None
+ has_index = self.index is not None
+
+ if not has_parent and not has_index:
+ raise ValueError("Either 'index' or 'parent_task_message' must be set")
+
+ if has_parent and has_index:
+ raise ValueError(
+ "Cannot set both 'index' and 'parent_task_message' - only one is allowed"
+ )
+
+ return self
+
+
+# Everything is streamed as a partial json blob, except for text.
+class StreamTaskMessageStart(StreamTaskMessage):
+ """Event for starting a streaming message
+
+ Attributes:
+ type: The type of task message update
+ content: The content of the task message
+ """
+
+ type: Literal["start"] = "start"
+ content: TaskMessageContent
+
+
+class StreamTaskMessageDelta(StreamTaskMessage):
+ """Event for streaming chunks of content
+
+ Attributes:
+ type: The type of task message update
+ delta: The delta of the task message
+ """
+
+ type: Literal["delta"] = "delta"
+ delta: TaskMessageDelta | None = None
+
+
+class StreamTaskMessageFull(StreamTaskMessage):
+ """Event for streaming the full content
+
+ Attributes:
+ type: The type of task message update
+ content: The content of the task message
+ """
+
+ type: Literal["full"] = "full"
+ content: TaskMessageContent
+
+
+class StreamTaskMessageDone(StreamTaskMessage):
+    """Event for indicating the streaming message is done
+
+ Attributes:
+ type: The type of task message update
+ """
+
+ type: Literal["done"] = "done"
+
+
+TaskMessageUpdate = Annotated[
+ StreamTaskMessageStart
+ | StreamTaskMessageDelta
+ | StreamTaskMessageFull
+ | StreamTaskMessageDone,
+ Field(discriminator="type"),
+]
diff --git a/src/agentex/lib/types/tracing.py b/src/agentex/lib/types/tracing.py
new file mode 100644
index 000000000..f9a0af3d5
--- /dev/null
+++ b/src/agentex/lib/types/tracing.py
@@ -0,0 +1,34 @@
+from typing import Annotated, Literal
+
+from pydantic import Field
+
+from agentex.lib.utils.model_utils import BaseModel
+
+
+class BaseModelWithTraceParams(BaseModel):
+ """
+ Base model with trace parameters.
+
+ Attributes:
+ trace_id: The trace ID
+ parent_span_id: The parent span ID
+ """
+
+ trace_id: str | None = None
+ parent_span_id: str | None = None
+
+
+class AgentexTracingProcessorConfig(BaseModel):
+ type: Literal["agentex"] = "agentex"
+
+
+class SGPTracingProcessorConfig(BaseModel):
+ type: Literal["sgp"] = "sgp"
+ sgp_api_key: str
+ sgp_account_id: str
+
+
+TracingProcessorConfig = Annotated[
+ AgentexTracingProcessorConfig | SGPTracingProcessorConfig,
+ Field(discriminator="type"),
+]
diff --git a/src/agentex/lib/utils/__init__.py b/src/agentex/lib/utils/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/agentex/lib/utils/completions.py b/src/agentex/lib/utils/completions.py
new file mode 100644
index 000000000..89114dcb8
--- /dev/null
+++ b/src/agentex/lib/utils/completions.py
@@ -0,0 +1,131 @@
+from copy import deepcopy
+from functools import reduce, singledispatch
+from typing import Any
+
+from agentex.lib.types.llm_messages import (
+ Choice,
+ Completion,
+ Delta,
+ ToolCall,
+ ToolCallRequest,
+)
+
+
+@singledispatch
+def _concat_chunks(a: None, b: Any):
+ return b
+
+
+@_concat_chunks.register
+def _(a: Completion, b: Completion) -> Completion:
+ a.choices = [_concat_chunks(*c) for c in zip(a.choices, b.choices, strict=False)]
+ return a
+
+
+@_concat_chunks.register
+def _(a: Choice, b: Choice) -> Choice:
+ if hasattr(a, "index") and hasattr(b, "index"):
+ assert a.index == b.index
+
+ if hasattr(a, "delta") and hasattr(b, "delta"):
+ a.delta = _concat_chunks(a.delta, b.delta)
+
+ a.finish_reason = a.finish_reason or b.finish_reason
+ return a
+
+
+@_concat_chunks.register
+def _(a: Delta, b: Delta) -> Delta:
+ a.content = a.content + b.content if a.content and b.content else a.content or b.content
+
+ if hasattr(a, "tool_calls") and hasattr(b, "tool_calls") and a.tool_calls and b.tool_calls:
+ # Group tool calls by index
+ grouped_tool_calls = {}
+ for tool_call in a.tool_calls + b.tool_calls:
+ if tool_call.index not in grouped_tool_calls:
+ grouped_tool_calls[tool_call.index] = tool_call
+ else:
+ grouped_tool_calls[tool_call.index] = _concat_chunks(
+ grouped_tool_calls[tool_call.index], tool_call
+ )
+
+ a.tool_calls = list(grouped_tool_calls.values())
+ elif hasattr(b, "tool_calls") and b.tool_calls:
+ a.tool_calls = b.tool_calls
+
+ return a
+
+
+@_concat_chunks.register
+def _(a: ToolCallRequest, b: ToolCallRequest) -> ToolCallRequest:
+ # Preserve id from either a or b, with preference for a
+ id_val = a.id if a.id is not None else b.id
+
+ # Use index from either a or b, with preference for a's index
+ index_val = a.index if hasattr(a, "index") and a.index is not None else b.index
+
+ # Concatenate the function part
+ function_val = (
+ _concat_chunks(a.function, b.function)
+ if a.function and b.function
+ else a.function or b.function
+ )
+
+ # Set all properties
+ a.id = id_val
+ a.index = index_val
+ a.function = function_val
+
+ return a
+
+
+@_concat_chunks.register
+def _(a: ToolCall, b: ToolCall) -> ToolCall:
+ # Preserve name from either a or b, with preference for a
+ name_val = a.name or b.name
+
+ # Concatenate arguments string
+ args_val = ""
+ if a.arguments is not None and b.arguments is not None:
+ args_val = a.arguments + b.arguments
+ else:
+ args_val = a.arguments or b.arguments
+
+ # Set all properties
+ a.name = name_val
+ a.arguments = args_val
+
+ return a
+
+
+def concat_completion_chunks(chunks: list[Completion]) -> Completion:
+ """
+ Accumulates all chunks returned from a streaming completion call into a `Completion` message.
+ This is useful when you stream responses from an LLM and want to keep track of the context (i.e. previous messages + current message).
+
+ Args:
+ chunks: list of completion chunks returned from streamed completion
+ Returns:
+ Completion: same as type returned from non-streaming completion
+
+
+
+ To implement `concat_completion_chunks` we first implement a binary `_concat_chunks` function for each
+ type. Using `singledispatch` to dispatch the call to the appropriate function based on the type of the first argument.
+    Each nested type is then concatenated. We can then use reduce to accumulate the entire stream into a
+    single `CompletionChunk`. Finally we convert the type to the appropriate non-streaming type `Completion` and return it.
+ """
+ if not chunks:
+ return None
+
+ chunks_copy = chunks.copy()
+ chunks_copy[0] = deepcopy(chunks_copy[0]) # _concat_chunks mutates first argument
+ accumulated_chunks = reduce(_concat_chunks, chunks_copy)
+
+ data = accumulated_chunks.model_dump()
+ data["object"] = "chat.completion"
+ choices = data["choices"]
+ for choice in choices:
+ choice["message"] = choice.pop("delta")
+
+ return Completion.model_validate(data)
diff --git a/src/agentex/lib/utils/console.py b/src/agentex/lib/utils/console.py
new file mode 100644
index 000000000..17f89198a
--- /dev/null
+++ b/src/agentex/lib/utils/console.py
@@ -0,0 +1,14 @@
+from rich import box
+from rich.console import Console
+from rich.table import Table
+
+console = Console()
+
+
+def print_section(name: str, contents: list[str], subtitle: str | None = None):
+ console.print()
+ table = Table(box=box.SQUARE, caption=subtitle, show_header=False, expand=True)
+ table.title = name
+ table.add_column(name, style="dim", width=12)
+ table.add_row(*contents)
+ console.print(table)
diff --git a/src/agentex/lib/utils/io.py b/src/agentex/lib/utils/io.py
new file mode 100644
index 000000000..11e2378a7
--- /dev/null
+++ b/src/agentex/lib/utils/io.py
@@ -0,0 +1,29 @@
+from typing import Any
+
+import yaml
+from yaml.scanner import ScannerError
+
+
+class InvalidYAMLError(ValueError):
+ """
+    Raised when trying to read a YAML file, but the file is not formatted correctly.
+ """
+
+
+def load_yaml_file(file_path: str) -> dict[str, Any]:
+ """
+ Loads a YAML file from the specified path.
+
+ :param file_path: The path of the YAML file to load.
+ :type file_path: str
+ :return: The contents of the YAML file.
+ :rtype: dict
+ """
+ try:
+ with open(file_path) as file:
+ yaml_dict = yaml.safe_load(file)
+ return yaml_dict
+ except ScannerError as error:
+ raise InvalidYAMLError(
+ f"The following file is not in valid YAML format: {file_path}"
+ ) from error
diff --git a/src/agentex/lib/utils/iterables.py b/src/agentex/lib/utils/iterables.py
new file mode 100644
index 000000000..31a275964
--- /dev/null
+++ b/src/agentex/lib/utils/iterables.py
@@ -0,0 +1,14 @@
+from collections.abc import AsyncGenerator
+from typing import Any
+
+
+async def async_enumerate(
+ aiterable: AsyncGenerator, start: int = 0
+) -> AsyncGenerator[tuple[int, Any], None]:
+ """
+ Enumerate an async generator.
+ """
+ i = start
+ async for item in aiterable:
+ yield i, item
+ i += 1
diff --git a/src/agentex/lib/utils/json_schema.py b/src/agentex/lib/utils/json_schema.py
new file mode 100644
index 000000000..da34b6d4b
--- /dev/null
+++ b/src/agentex/lib/utils/json_schema.py
@@ -0,0 +1,23 @@
+from typing import Any
+
+import jsonref
+from jsonschema import validate as schema_validation
+
+
+def resolve_refs(schema: dict) -> dict:
+ """
+ Resolve JSON references in a schema.
+ """
+ resolved = jsonref.replace_refs(schema, proxies=False, lazy_load=False)
+ serializable = {
+ "type": resolved.get("type"),
+ "properties": resolved.get("properties"),
+ "required": list(resolved.get("required", [])),
+ "additionalProperties": resolved.get("additionalProperties", False),
+ }
+ return serializable
+
+
+def validate_payload(json_schema: dict[str, Any], payload: dict[str, Any]) -> None:
+ """Validate the payload against the JSON schema."""
+ schema_validation(instance=payload, schema=json_schema)
diff --git a/src/agentex/lib/utils/logging.py b/src/agentex/lib/utils/logging.py
new file mode 100644
index 000000000..26d9abcdd
--- /dev/null
+++ b/src/agentex/lib/utils/logging.py
@@ -0,0 +1,31 @@
+import logging
+
+from rich.console import Console
+from rich.logging import RichHandler
+
+
+def make_logger(name: str):
+ """
+ Creates a logger object with a RichHandler to print colored text.
+ :param name: The name of the module to create the logger for.
+ :return: A logger object.
+ """
+ # Create a console object to print colored text
+ console = Console()
+
+ # Create a logger object with the name of the current module
+ logger = logging.getLogger(name)
+
+ # Set the global log level to INFO
+ logger.setLevel(logging.INFO)
+
+ # Add the RichHandler to the logger to print colored text
+ handler = RichHandler(
+ console=console,
+ show_level=False,
+ show_path=False,
+ show_time=False,
+ )
+ logger.addHandler(handler)
+
+ return logger
diff --git a/src/agentex/lib/utils/mcp.py b/src/agentex/lib/utils/mcp.py
new file mode 100644
index 000000000..b95126a6b
--- /dev/null
+++ b/src/agentex/lib/utils/mcp.py
@@ -0,0 +1,17 @@
+from typing import Any
+from mcp import StdioServerParameters
+
+
+def redact_mcp_server_params(
+ mcp_server_params: list[StdioServerParameters],
+) -> list[dict[str, Any]]:
+ """Redact MCP server params for logging."""
+ return [
+ {
+ **{k: v for k, v in server_param.model_dump().items() if k != "env"},
+ "env": dict.fromkeys(server_param.env, "********")
+ if server_param.env
+ else None,
+ }
+ for server_param in mcp_server_params
+ ]
diff --git a/src/agentex/lib/utils/model_utils.py b/src/agentex/lib/utils/model_utils.py
new file mode 100644
index 000000000..a4f68c845
--- /dev/null
+++ b/src/agentex/lib/utils/model_utils.py
@@ -0,0 +1,46 @@
+from collections.abc import Iterable, Mapping
+from typing import Any, TypeVar
+
+from pydantic import BaseModel as PydanticBaseModel
+from pydantic import ConfigDict
+
+from agentex.lib.utils.io import load_yaml_file
+
+T = TypeVar("T", bound="BaseModel")
+
+
+class BaseModel(PydanticBaseModel):
+ model_config = ConfigDict(from_attributes=True, populate_by_name=True)
+
+ @classmethod
+ def from_yaml(cls: type[T], file_path: str) -> T:
+ """
+ Returns an instance of this class by deserializing from a YAML file.
+
+ :param file_path: The path to the YAML file.
+ :return: An instance of this class.
+ """
+ yaml_dict = load_yaml_file(file_path=file_path)
+ class_object = cls.model_validate(yaml_dict)
+ return class_object
+
+ def to_json(self, *args, **kwargs) -> str:
+ return self.model_dump_json(*args, **kwargs)
+
+ def to_dict(self, *args, **kwargs) -> dict[str, Any]:
+ return recursive_model_dump(self)
+
+
+def recursive_model_dump(obj: Any) -> Any:
+ if isinstance(obj, PydanticBaseModel):
+ # Serialize BaseModel to dict
+ return obj.model_dump(mode="json")
+ elif isinstance(obj, Mapping):
+ # Recursively serialize dictionary values
+ return {k: recursive_model_dump(v) for k, v in obj.items()}
+ elif isinstance(obj, Iterable) and not isinstance(obj, str | bytes):
+ # Recursively serialize items in lists, tuples, sets, etc.
+ return [recursive_model_dump(item) for item in obj]
+ else:
+ # Return primitive types as-is
+ return obj
diff --git a/src/agentex/lib/utils/parsing.py b/src/agentex/lib/utils/parsing.py
new file mode 100644
index 000000000..ecb61206a
--- /dev/null
+++ b/src/agentex/lib/utils/parsing.py
@@ -0,0 +1,15 @@
+from urllib.parse import urlsplit, urlunsplit
+
+
+def remove_query_params(url):
+ split_url = urlsplit(url)
+ scheme, netloc, path, query, fragment = split_url
+
+ if query:
+ query = ''
+ else:
+ amp_index = path.find('&')
+ if amp_index != -1:
+ path = path[:amp_index]
+
+ return urlunsplit((scheme, netloc, path, query, fragment))
diff --git a/src/agentex/lib/utils/regex.py b/src/agentex/lib/utils/regex.py
new file mode 100644
index 000000000..c760b10dd
--- /dev/null
+++ b/src/agentex/lib/utils/regex.py
@@ -0,0 +1,6 @@
+import re
+
+
+def camel_to_snake(camel_case_str: str) -> str:
+ # Substitute capital letters with an underscore followed by the lowercase letter
+ return re.sub(r'(? NameResource:
- return NameResource(self._client)
-
@cached_property
def with_raw_response(self) -> AgentsResourceWithRawResponse:
"""
@@ -160,12 +150,78 @@ def delete(
cast_to=Agent,
)
+ def delete_by_name(
+ self,
+ agent_name: str,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> Agent:
+ """
+ Delete an agent by its unique name.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not agent_name:
+ raise ValueError(f"Expected a non-empty value for `agent_name` but received {agent_name!r}")
+ return self._delete(
+ f"/agents/name/{agent_name}",
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=Agent,
+ )
+
+ def retrieve_by_name(
+ self,
+ agent_name: str,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> Agent:
+ """
+ Get an agent by its unique name.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not agent_name:
+ raise ValueError(f"Expected a non-empty value for `agent_name` but received {agent_name!r}")
+ return self._get(
+ f"/agents/name/{agent_name}",
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=Agent,
+ )
+
def rpc(
self,
agent_id: str,
*,
method: Literal["event/send", "task/create", "message/send", "task/cancel"],
- params: agent_rpc_params.Params,
+ params: AgentRpcParams,
id: Union[int, str, None] | NotGiven = NOT_GIVEN,
jsonrpc: Literal["2.0"] | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
@@ -174,11 +230,13 @@ def rpc(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> object:
+ ) -> AgentRpcResponse:
"""
Handle JSON-RPC requests for an agent by its unique ID.
Args:
+ params: The parameters for the agent RPC request
+
extra_headers: Send extra headers
extra_query: Add additional query parameters to the request
@@ -203,15 +261,59 @@ def rpc(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=object,
+ cast_to=AgentRpcResponse,
)
+ def rpc_by_name(
+ self,
+ agent_name: str,
+ *,
+ method: Literal["event/send", "task/create", "message/send", "task/cancel"],
+ params: AgentRpcParams,
+ id: Union[int, str, None] | NotGiven = NOT_GIVEN,
+ jsonrpc: Literal["2.0"] | NotGiven = NOT_GIVEN,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> AgentRpcResponse:
+ """
+ Handle JSON-RPC requests for an agent by its unique name.
+
+ Args:
+ params: The parameters for the agent RPC request
-class AsyncAgentsResource(AsyncAPIResource):
- @cached_property
- def name(self) -> AsyncNameResource:
- return AsyncNameResource(self._client)
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not agent_name:
+ raise ValueError(f"Expected a non-empty value for `agent_name` but received {agent_name!r}")
+ return self._post(
+ f"/agents/name/{agent_name}/rpc",
+ body=maybe_transform(
+ {
+ "method": method,
+ "params": params,
+ "id": id,
+ "jsonrpc": jsonrpc,
+ },
+ agent_rpc_by_name_params.AgentRpcByNameParams,
+ ),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=AgentRpcResponse,
+ )
+
+
+class AsyncAgentsResource(AsyncAPIResource):
@cached_property
def with_raw_response(self) -> AsyncAgentsResourceWithRawResponse:
"""
@@ -334,12 +436,78 @@ async def delete(
cast_to=Agent,
)
+ async def delete_by_name(
+ self,
+ agent_name: str,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> Agent:
+ """
+ Delete an agent by its unique name.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not agent_name:
+ raise ValueError(f"Expected a non-empty value for `agent_name` but received {agent_name!r}")
+ return await self._delete(
+ f"/agents/name/{agent_name}",
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=Agent,
+ )
+
+ async def retrieve_by_name(
+ self,
+ agent_name: str,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> Agent:
+ """
+ Get an agent by its unique name.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not agent_name:
+ raise ValueError(f"Expected a non-empty value for `agent_name` but received {agent_name!r}")
+ return await self._get(
+ f"/agents/name/{agent_name}",
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=Agent,
+ )
+
async def rpc(
self,
agent_id: str,
*,
method: Literal["event/send", "task/create", "message/send", "task/cancel"],
- params: agent_rpc_params.Params,
+ params: AgentRpcParams,
id: Union[int, str, None] | NotGiven = NOT_GIVEN,
jsonrpc: Literal["2.0"] | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
@@ -348,11 +516,13 @@ async def rpc(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> object:
+ ) -> AgentRpcResponse:
"""
Handle JSON-RPC requests for an agent by its unique ID.
Args:
+ params: The parameters for the agent RPC request
+
extra_headers: Send extra headers
extra_query: Add additional query parameters to the request
@@ -377,7 +547,55 @@ async def rpc(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=object,
+ cast_to=AgentRpcResponse,
+ )
+
+ async def rpc_by_name(
+ self,
+ agent_name: str,
+ *,
+ method: Literal["event/send", "task/create", "message/send", "task/cancel"],
+ params: AgentRpcParams,
+ id: Union[int, str, None] | NotGiven = NOT_GIVEN,
+ jsonrpc: Literal["2.0"] | NotGiven = NOT_GIVEN,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> AgentRpcResponse:
+ """
+ Handle JSON-RPC requests for an agent by its unique name.
+
+ Args:
+ params: The parameters for the agent RPC request
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not agent_name:
+ raise ValueError(f"Expected a non-empty value for `agent_name` but received {agent_name!r}")
+ return await self._post(
+ f"/agents/name/{agent_name}/rpc",
+ body=await async_maybe_transform(
+ {
+ "method": method,
+ "params": params,
+ "id": id,
+ "jsonrpc": jsonrpc,
+ },
+ agent_rpc_by_name_params.AgentRpcByNameParams,
+ ),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=AgentRpcResponse,
)
@@ -394,13 +612,18 @@ def __init__(self, agents: AgentsResource) -> None:
self.delete = to_raw_response_wrapper(
agents.delete,
)
+ self.delete_by_name = to_raw_response_wrapper(
+ agents.delete_by_name,
+ )
+ self.retrieve_by_name = to_raw_response_wrapper(
+ agents.retrieve_by_name,
+ )
self.rpc = to_raw_response_wrapper(
agents.rpc,
)
-
- @cached_property
- def name(self) -> NameResourceWithRawResponse:
- return NameResourceWithRawResponse(self._agents.name)
+ self.rpc_by_name = to_raw_response_wrapper(
+ agents.rpc_by_name,
+ )
class AsyncAgentsResourceWithRawResponse:
@@ -416,13 +639,18 @@ def __init__(self, agents: AsyncAgentsResource) -> None:
self.delete = async_to_raw_response_wrapper(
agents.delete,
)
+ self.delete_by_name = async_to_raw_response_wrapper(
+ agents.delete_by_name,
+ )
+ self.retrieve_by_name = async_to_raw_response_wrapper(
+ agents.retrieve_by_name,
+ )
self.rpc = async_to_raw_response_wrapper(
agents.rpc,
)
-
- @cached_property
- def name(self) -> AsyncNameResourceWithRawResponse:
- return AsyncNameResourceWithRawResponse(self._agents.name)
+ self.rpc_by_name = async_to_raw_response_wrapper(
+ agents.rpc_by_name,
+ )
class AgentsResourceWithStreamingResponse:
@@ -438,13 +666,18 @@ def __init__(self, agents: AgentsResource) -> None:
self.delete = to_streamed_response_wrapper(
agents.delete,
)
+ self.delete_by_name = to_streamed_response_wrapper(
+ agents.delete_by_name,
+ )
+ self.retrieve_by_name = to_streamed_response_wrapper(
+ agents.retrieve_by_name,
+ )
self.rpc = to_streamed_response_wrapper(
agents.rpc,
)
-
- @cached_property
- def name(self) -> NameResourceWithStreamingResponse:
- return NameResourceWithStreamingResponse(self._agents.name)
+ self.rpc_by_name = to_streamed_response_wrapper(
+ agents.rpc_by_name,
+ )
class AsyncAgentsResourceWithStreamingResponse:
@@ -460,10 +693,15 @@ def __init__(self, agents: AsyncAgentsResource) -> None:
self.delete = async_to_streamed_response_wrapper(
agents.delete,
)
+ self.delete_by_name = async_to_streamed_response_wrapper(
+ agents.delete_by_name,
+ )
+ self.retrieve_by_name = async_to_streamed_response_wrapper(
+ agents.retrieve_by_name,
+ )
self.rpc = async_to_streamed_response_wrapper(
agents.rpc,
)
-
- @cached_property
- def name(self) -> AsyncNameResourceWithStreamingResponse:
- return AsyncNameResourceWithStreamingResponse(self._agents.name)
+ self.rpc_by_name = async_to_streamed_response_wrapper(
+ agents.rpc_by_name,
+ )
diff --git a/src/agentex/resources/agents/__init__.py b/src/agentex/resources/agents/__init__.py
deleted file mode 100644
index 374345e0f..000000000
--- a/src/agentex/resources/agents/__init__.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from .name import (
- NameResource,
- AsyncNameResource,
- NameResourceWithRawResponse,
- AsyncNameResourceWithRawResponse,
- NameResourceWithStreamingResponse,
- AsyncNameResourceWithStreamingResponse,
-)
-from .agents import (
- AgentsResource,
- AsyncAgentsResource,
- AgentsResourceWithRawResponse,
- AsyncAgentsResourceWithRawResponse,
- AgentsResourceWithStreamingResponse,
- AsyncAgentsResourceWithStreamingResponse,
-)
-
-__all__ = [
- "NameResource",
- "AsyncNameResource",
- "NameResourceWithRawResponse",
- "AsyncNameResourceWithRawResponse",
- "NameResourceWithStreamingResponse",
- "AsyncNameResourceWithStreamingResponse",
- "AgentsResource",
- "AsyncAgentsResource",
- "AgentsResourceWithRawResponse",
- "AsyncAgentsResourceWithRawResponse",
- "AgentsResourceWithStreamingResponse",
- "AsyncAgentsResourceWithStreamingResponse",
-]
diff --git a/src/agentex/resources/agents/name.py b/src/agentex/resources/agents/name.py
deleted file mode 100644
index 8906f03e4..000000000
--- a/src/agentex/resources/agents/name.py
+++ /dev/null
@@ -1,350 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-from typing import Union
-from typing_extensions import Literal
-
-import httpx
-
-from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
-from ..._utils import maybe_transform, async_maybe_transform
-from ..._compat import cached_property
-from ..._resource import SyncAPIResource, AsyncAPIResource
-from ..._response import (
- to_raw_response_wrapper,
- to_streamed_response_wrapper,
- async_to_raw_response_wrapper,
- async_to_streamed_response_wrapper,
-)
-from ...types.agent import Agent
-from ..._base_client import make_request_options
-from ...types.agents import name_rpc_params
-
-__all__ = ["NameResource", "AsyncNameResource"]
-
-
-class NameResource(SyncAPIResource):
- @cached_property
- def with_raw_response(self) -> NameResourceWithRawResponse:
- """
- This property can be used as a prefix for any HTTP method call to return
- the raw response object instead of the parsed content.
-
- For more information, see https://www.github.com/scaleapi/agentex-python#accessing-raw-response-data-eg-headers
- """
- return NameResourceWithRawResponse(self)
-
- @cached_property
- def with_streaming_response(self) -> NameResourceWithStreamingResponse:
- """
- An alternative to `.with_raw_response` that doesn't eagerly read the response body.
-
- For more information, see https://www.github.com/scaleapi/agentex-python#with_streaming_response
- """
- return NameResourceWithStreamingResponse(self)
-
- def retrieve(
- self,
- agent_name: str,
- *,
- # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
- # The extra values given here take precedence over values defined on the client or passed to this method.
- extra_headers: Headers | None = None,
- extra_query: Query | None = None,
- extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> Agent:
- """
- Get an agent by its unique name.
-
- Args:
- extra_headers: Send extra headers
-
- extra_query: Add additional query parameters to the request
-
- extra_body: Add additional JSON properties to the request
-
- timeout: Override the client-level default timeout for this request, in seconds
- """
- if not agent_name:
- raise ValueError(f"Expected a non-empty value for `agent_name` but received {agent_name!r}")
- return self._get(
- f"/agents/name/{agent_name}",
- options=make_request_options(
- extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
- ),
- cast_to=Agent,
- )
-
- def delete(
- self,
- agent_name: str,
- *,
- # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
- # The extra values given here take precedence over values defined on the client or passed to this method.
- extra_headers: Headers | None = None,
- extra_query: Query | None = None,
- extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> Agent:
- """
- Delete an agent by its unique name.
-
- Args:
- extra_headers: Send extra headers
-
- extra_query: Add additional query parameters to the request
-
- extra_body: Add additional JSON properties to the request
-
- timeout: Override the client-level default timeout for this request, in seconds
- """
- if not agent_name:
- raise ValueError(f"Expected a non-empty value for `agent_name` but received {agent_name!r}")
- return self._delete(
- f"/agents/name/{agent_name}",
- options=make_request_options(
- extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
- ),
- cast_to=Agent,
- )
-
- def rpc(
- self,
- agent_name: str,
- *,
- method: Literal["event/send", "task/create", "message/send", "task/cancel"],
- params: name_rpc_params.Params,
- id: Union[int, str, None] | NotGiven = NOT_GIVEN,
- jsonrpc: Literal["2.0"] | NotGiven = NOT_GIVEN,
- # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
- # The extra values given here take precedence over values defined on the client or passed to this method.
- extra_headers: Headers | None = None,
- extra_query: Query | None = None,
- extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> object:
- """
- Handle JSON-RPC requests for an agent by its unique name.
-
- Args:
- extra_headers: Send extra headers
-
- extra_query: Add additional query parameters to the request
-
- extra_body: Add additional JSON properties to the request
-
- timeout: Override the client-level default timeout for this request, in seconds
- """
- if not agent_name:
- raise ValueError(f"Expected a non-empty value for `agent_name` but received {agent_name!r}")
- return self._post(
- f"/agents/name/{agent_name}/rpc",
- body=maybe_transform(
- {
- "method": method,
- "params": params,
- "id": id,
- "jsonrpc": jsonrpc,
- },
- name_rpc_params.NameRpcParams,
- ),
- options=make_request_options(
- extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
- ),
- cast_to=object,
- )
-
-
-class AsyncNameResource(AsyncAPIResource):
- @cached_property
- def with_raw_response(self) -> AsyncNameResourceWithRawResponse:
- """
- This property can be used as a prefix for any HTTP method call to return
- the raw response object instead of the parsed content.
-
- For more information, see https://www.github.com/scaleapi/agentex-python#accessing-raw-response-data-eg-headers
- """
- return AsyncNameResourceWithRawResponse(self)
-
- @cached_property
- def with_streaming_response(self) -> AsyncNameResourceWithStreamingResponse:
- """
- An alternative to `.with_raw_response` that doesn't eagerly read the response body.
-
- For more information, see https://www.github.com/scaleapi/agentex-python#with_streaming_response
- """
- return AsyncNameResourceWithStreamingResponse(self)
-
- async def retrieve(
- self,
- agent_name: str,
- *,
- # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
- # The extra values given here take precedence over values defined on the client or passed to this method.
- extra_headers: Headers | None = None,
- extra_query: Query | None = None,
- extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> Agent:
- """
- Get an agent by its unique name.
-
- Args:
- extra_headers: Send extra headers
-
- extra_query: Add additional query parameters to the request
-
- extra_body: Add additional JSON properties to the request
-
- timeout: Override the client-level default timeout for this request, in seconds
- """
- if not agent_name:
- raise ValueError(f"Expected a non-empty value for `agent_name` but received {agent_name!r}")
- return await self._get(
- f"/agents/name/{agent_name}",
- options=make_request_options(
- extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
- ),
- cast_to=Agent,
- )
-
- async def delete(
- self,
- agent_name: str,
- *,
- # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
- # The extra values given here take precedence over values defined on the client or passed to this method.
- extra_headers: Headers | None = None,
- extra_query: Query | None = None,
- extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> Agent:
- """
- Delete an agent by its unique name.
-
- Args:
- extra_headers: Send extra headers
-
- extra_query: Add additional query parameters to the request
-
- extra_body: Add additional JSON properties to the request
-
- timeout: Override the client-level default timeout for this request, in seconds
- """
- if not agent_name:
- raise ValueError(f"Expected a non-empty value for `agent_name` but received {agent_name!r}")
- return await self._delete(
- f"/agents/name/{agent_name}",
- options=make_request_options(
- extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
- ),
- cast_to=Agent,
- )
-
- async def rpc(
- self,
- agent_name: str,
- *,
- method: Literal["event/send", "task/create", "message/send", "task/cancel"],
- params: name_rpc_params.Params,
- id: Union[int, str, None] | NotGiven = NOT_GIVEN,
- jsonrpc: Literal["2.0"] | NotGiven = NOT_GIVEN,
- # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
- # The extra values given here take precedence over values defined on the client or passed to this method.
- extra_headers: Headers | None = None,
- extra_query: Query | None = None,
- extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> object:
- """
- Handle JSON-RPC requests for an agent by its unique name.
-
- Args:
- extra_headers: Send extra headers
-
- extra_query: Add additional query parameters to the request
-
- extra_body: Add additional JSON properties to the request
-
- timeout: Override the client-level default timeout for this request, in seconds
- """
- if not agent_name:
- raise ValueError(f"Expected a non-empty value for `agent_name` but received {agent_name!r}")
- return await self._post(
- f"/agents/name/{agent_name}/rpc",
- body=await async_maybe_transform(
- {
- "method": method,
- "params": params,
- "id": id,
- "jsonrpc": jsonrpc,
- },
- name_rpc_params.NameRpcParams,
- ),
- options=make_request_options(
- extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
- ),
- cast_to=object,
- )
-
-
-class NameResourceWithRawResponse:
- def __init__(self, name: NameResource) -> None:
- self._name = name
-
- self.retrieve = to_raw_response_wrapper(
- name.retrieve,
- )
- self.delete = to_raw_response_wrapper(
- name.delete,
- )
- self.rpc = to_raw_response_wrapper(
- name.rpc,
- )
-
-
-class AsyncNameResourceWithRawResponse:
- def __init__(self, name: AsyncNameResource) -> None:
- self._name = name
-
- self.retrieve = async_to_raw_response_wrapper(
- name.retrieve,
- )
- self.delete = async_to_raw_response_wrapper(
- name.delete,
- )
- self.rpc = async_to_raw_response_wrapper(
- name.rpc,
- )
-
-
-class NameResourceWithStreamingResponse:
- def __init__(self, name: NameResource) -> None:
- self._name = name
-
- self.retrieve = to_streamed_response_wrapper(
- name.retrieve,
- )
- self.delete = to_streamed_response_wrapper(
- name.delete,
- )
- self.rpc = to_streamed_response_wrapper(
- name.rpc,
- )
-
-
-class AsyncNameResourceWithStreamingResponse:
- def __init__(self, name: AsyncNameResource) -> None:
- self._name = name
-
- self.retrieve = async_to_streamed_response_wrapper(
- name.retrieve,
- )
- self.delete = async_to_streamed_response_wrapper(
- name.delete,
- )
- self.rpc = async_to_streamed_response_wrapper(
- name.rpc,
- )
diff --git a/src/agentex/resources/echo.py b/src/agentex/resources/echo.py
deleted file mode 100644
index 5966357a9..000000000
--- a/src/agentex/resources/echo.py
+++ /dev/null
@@ -1,162 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-import httpx
-
-from ..types import echo_send_params
-from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
-from .._utils import maybe_transform, async_maybe_transform
-from .._compat import cached_property
-from .._resource import SyncAPIResource, AsyncAPIResource
-from .._response import (
- to_raw_response_wrapper,
- to_streamed_response_wrapper,
- async_to_raw_response_wrapper,
- async_to_streamed_response_wrapper,
-)
-from .._base_client import make_request_options
-
-__all__ = ["EchoResource", "AsyncEchoResource"]
-
-
-class EchoResource(SyncAPIResource):
- @cached_property
- def with_raw_response(self) -> EchoResourceWithRawResponse:
- """
- This property can be used as a prefix for any HTTP method call to return
- the raw response object instead of the parsed content.
-
- For more information, see https://www.github.com/scaleapi/agentex-python#accessing-raw-response-data-eg-headers
- """
- return EchoResourceWithRawResponse(self)
-
- @cached_property
- def with_streaming_response(self) -> EchoResourceWithStreamingResponse:
- """
- An alternative to `.with_raw_response` that doesn't eagerly read the response body.
-
- For more information, see https://www.github.com/scaleapi/agentex-python#with_streaming_response
- """
- return EchoResourceWithStreamingResponse(self)
-
- def send(
- self,
- *,
- message: str,
- # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
- # The extra values given here take precedence over values defined on the client or passed to this method.
- extra_headers: Headers | None = None,
- extra_query: Query | None = None,
- extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> object:
- """
- Echo
-
- Args:
- extra_headers: Send extra headers
-
- extra_query: Add additional query parameters to the request
-
- extra_body: Add additional JSON properties to the request
-
- timeout: Override the client-level default timeout for this request, in seconds
- """
- return self._post(
- "/echo",
- body=maybe_transform({"message": message}, echo_send_params.EchoSendParams),
- options=make_request_options(
- extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
- ),
- cast_to=object,
- )
-
-
-class AsyncEchoResource(AsyncAPIResource):
- @cached_property
- def with_raw_response(self) -> AsyncEchoResourceWithRawResponse:
- """
- This property can be used as a prefix for any HTTP method call to return
- the raw response object instead of the parsed content.
-
- For more information, see https://www.github.com/scaleapi/agentex-python#accessing-raw-response-data-eg-headers
- """
- return AsyncEchoResourceWithRawResponse(self)
-
- @cached_property
- def with_streaming_response(self) -> AsyncEchoResourceWithStreamingResponse:
- """
- An alternative to `.with_raw_response` that doesn't eagerly read the response body.
-
- For more information, see https://www.github.com/scaleapi/agentex-python#with_streaming_response
- """
- return AsyncEchoResourceWithStreamingResponse(self)
-
- async def send(
- self,
- *,
- message: str,
- # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
- # The extra values given here take precedence over values defined on the client or passed to this method.
- extra_headers: Headers | None = None,
- extra_query: Query | None = None,
- extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> object:
- """
- Echo
-
- Args:
- extra_headers: Send extra headers
-
- extra_query: Add additional query parameters to the request
-
- extra_body: Add additional JSON properties to the request
-
- timeout: Override the client-level default timeout for this request, in seconds
- """
- return await self._post(
- "/echo",
- body=await async_maybe_transform({"message": message}, echo_send_params.EchoSendParams),
- options=make_request_options(
- extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
- ),
- cast_to=object,
- )
-
-
-class EchoResourceWithRawResponse:
- def __init__(self, echo: EchoResource) -> None:
- self._echo = echo
-
- self.send = to_raw_response_wrapper(
- echo.send,
- )
-
-
-class AsyncEchoResourceWithRawResponse:
- def __init__(self, echo: AsyncEchoResource) -> None:
- self._echo = echo
-
- self.send = async_to_raw_response_wrapper(
- echo.send,
- )
-
-
-class EchoResourceWithStreamingResponse:
- def __init__(self, echo: EchoResource) -> None:
- self._echo = echo
-
- self.send = to_streamed_response_wrapper(
- echo.send,
- )
-
-
-class AsyncEchoResourceWithStreamingResponse:
- def __init__(self, echo: AsyncEchoResource) -> None:
- self._echo = echo
-
- self.send = async_to_streamed_response_wrapper(
- echo.send,
- )
diff --git a/src/agentex/resources/messages/batch.py b/src/agentex/resources/messages/batch.py
index 478c2df5f..584126639 100644
--- a/src/agentex/resources/messages/batch.py
+++ b/src/agentex/resources/messages/batch.py
@@ -18,6 +18,7 @@
)
from ..._base_client import make_request_options
from ...types.messages import batch_create_params, batch_update_params
+from ...types.task_message_content_param import TaskMessageContentParam
from ...types.messages.batch_create_response import BatchCreateResponse
from ...types.messages.batch_update_response import BatchUpdateResponse
@@ -47,7 +48,7 @@ def with_streaming_response(self) -> BatchResourceWithStreamingResponse:
def create(
self,
*,
- contents: Iterable[batch_create_params.Content],
+ contents: Iterable[TaskMessageContentParam],
task_id: str,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
@@ -87,7 +88,7 @@ def update(
self,
*,
task_id: str,
- updates: Dict[str, batch_update_params.Updates],
+ updates: Dict[str, TaskMessageContentParam],
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
@@ -146,7 +147,7 @@ def with_streaming_response(self) -> AsyncBatchResourceWithStreamingResponse:
async def create(
self,
*,
- contents: Iterable[batch_create_params.Content],
+ contents: Iterable[TaskMessageContentParam],
task_id: str,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
@@ -186,7 +187,7 @@ async def update(
self,
*,
task_id: str,
- updates: Dict[str, batch_update_params.Updates],
+ updates: Dict[str, TaskMessageContentParam],
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
diff --git a/src/agentex/resources/messages/messages.py b/src/agentex/resources/messages/messages.py
index 7745cf76d..b7f78c941 100644
--- a/src/agentex/resources/messages/messages.py
+++ b/src/agentex/resources/messages/messages.py
@@ -3,6 +3,7 @@
from __future__ import annotations
from typing import Optional
+from typing_extensions import Literal
import httpx
@@ -14,7 +15,7 @@
BatchResourceWithStreamingResponse,
AsyncBatchResourceWithStreamingResponse,
)
-from ...types import StreamingStatus, message_list_params, message_create_params, message_update_params
+from ...types import message_list_params, message_create_params, message_update_params
from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
from ..._utils import maybe_transform, async_maybe_transform
from ..._compat import cached_property
@@ -27,8 +28,8 @@
)
from ..._base_client import make_request_options
from ...types.task_message import TaskMessage
-from ...types.streaming_status import StreamingStatus
from ...types.message_list_response import MessageListResponse
+from ...types.task_message_content_param import TaskMessageContentParam
__all__ = ["MessagesResource", "AsyncMessagesResource"]
@@ -60,9 +61,9 @@ def with_streaming_response(self) -> MessagesResourceWithStreamingResponse:
def create(
self,
*,
- content: message_create_params.Content,
+ content: TaskMessageContentParam,
task_id: str,
- streaming_status: Optional[StreamingStatus] | NotGiven = NOT_GIVEN,
+ streaming_status: Optional[Literal["IN_PROGRESS", "DONE"]] | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
@@ -135,9 +136,9 @@ def update(
self,
message_id: str,
*,
- content: message_update_params.Content,
+ content: TaskMessageContentParam,
task_id: str,
- streaming_status: Optional[StreamingStatus] | NotGiven = NOT_GIVEN,
+ streaming_status: Optional[Literal["IN_PROGRESS", "DONE"]] | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
@@ -245,9 +246,9 @@ def with_streaming_response(self) -> AsyncMessagesResourceWithStreamingResponse:
async def create(
self,
*,
- content: message_create_params.Content,
+ content: TaskMessageContentParam,
task_id: str,
- streaming_status: Optional[StreamingStatus] | NotGiven = NOT_GIVEN,
+ streaming_status: Optional[Literal["IN_PROGRESS", "DONE"]] | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
@@ -320,9 +321,9 @@ async def update(
self,
message_id: str,
*,
- content: message_update_params.Content,
+ content: TaskMessageContentParam,
task_id: str,
- streaming_status: Optional[StreamingStatus] | NotGiven = NOT_GIVEN,
+ streaming_status: Optional[Literal["IN_PROGRESS", "DONE"]] | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
diff --git a/src/agentex/resources/tasks/tasks.py b/src/agentex/resources/tasks.py
similarity index 58%
rename from src/agentex/resources/tasks/tasks.py
rename to src/agentex/resources/tasks.py
index ba8161ed2..936ee66cf 100644
--- a/src/agentex/resources/tasks/tasks.py
+++ b/src/agentex/resources/tasks.py
@@ -4,36 +4,24 @@
import httpx
-from .name import (
- NameResource,
- AsyncNameResource,
- NameResourceWithRawResponse,
- AsyncNameResourceWithRawResponse,
- NameResourceWithStreamingResponse,
- AsyncNameResourceWithStreamingResponse,
-)
-from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
-from ..._compat import cached_property
-from ..._resource import SyncAPIResource, AsyncAPIResource
-from ..._response import (
+from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from .._compat import cached_property
+from .._resource import SyncAPIResource, AsyncAPIResource
+from .._response import (
to_raw_response_wrapper,
to_streamed_response_wrapper,
async_to_raw_response_wrapper,
async_to_streamed_response_wrapper,
)
-from ..._streaming import Stream, AsyncStream
-from ...types.task import Task
-from ..._base_client import make_request_options
-from ...types.task_list_response import TaskListResponse
+from .._streaming import Stream, AsyncStream
+from ..types.task import Task
+from .._base_client import make_request_options
+from ..types.task_list_response import TaskListResponse
__all__ = ["TasksResource", "AsyncTasksResource"]
class TasksResource(SyncAPIResource):
- @cached_property
- def name(self) -> NameResource:
- return NameResource(self._client)
-
@cached_property
def with_raw_response(self) -> TasksResourceWithRawResponse:
"""
@@ -138,6 +126,72 @@ def delete(
cast_to=Task,
)
+ def delete_by_name(
+ self,
+ task_name: str,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> Task:
+ """
+ Delete a task by its unique name.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not task_name:
+ raise ValueError(f"Expected a non-empty value for `task_name` but received {task_name!r}")
+ return self._delete(
+ f"/tasks/name/{task_name}",
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=Task,
+ )
+
+ def retrieve_by_name(
+ self,
+ task_name: str,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> Task:
+ """
+ Get a task by its unique name.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not task_name:
+ raise ValueError(f"Expected a non-empty value for `task_name` but received {task_name!r}")
+ return self._get(
+ f"/tasks/name/{task_name}",
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=Task,
+ )
+
def stream_events(
self,
task_id: str,
@@ -173,12 +227,43 @@ def stream_events(
stream_cls=Stream[object],
)
+ def stream_events_by_name(
+ self,
+ task_name: str,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> Stream[object]:
+ """
+ Stream events for a task by its unique name.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not task_name:
+ raise ValueError(f"Expected a non-empty value for `task_name` but received {task_name!r}")
+ return self._get(
+ f"/tasks/name/{task_name}/stream",
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=object,
+ stream=True,
+ stream_cls=Stream[object],
+ )
-class AsyncTasksResource(AsyncAPIResource):
- @cached_property
- def name(self) -> AsyncNameResource:
- return AsyncNameResource(self._client)
+class AsyncTasksResource(AsyncAPIResource):
@cached_property
def with_raw_response(self) -> AsyncTasksResourceWithRawResponse:
"""
@@ -283,6 +368,72 @@ async def delete(
cast_to=Task,
)
+ async def delete_by_name(
+ self,
+ task_name: str,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> Task:
+ """
+ Delete a task by its unique name.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not task_name:
+ raise ValueError(f"Expected a non-empty value for `task_name` but received {task_name!r}")
+ return await self._delete(
+ f"/tasks/name/{task_name}",
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=Task,
+ )
+
+ async def retrieve_by_name(
+ self,
+ task_name: str,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> Task:
+ """
+ Get a task by its unique name.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not task_name:
+ raise ValueError(f"Expected a non-empty value for `task_name` but received {task_name!r}")
+ return await self._get(
+ f"/tasks/name/{task_name}",
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=Task,
+ )
+
async def stream_events(
self,
task_id: str,
@@ -318,6 +469,41 @@ async def stream_events(
stream_cls=AsyncStream[object],
)
+ async def stream_events_by_name(
+ self,
+ task_name: str,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> AsyncStream[object]:
+ """
+ Stream events for a task by its unique name.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not task_name:
+ raise ValueError(f"Expected a non-empty value for `task_name` but received {task_name!r}")
+ return await self._get(
+ f"/tasks/name/{task_name}/stream",
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=object,
+ stream=True,
+ stream_cls=AsyncStream[object],
+ )
+
class TasksResourceWithRawResponse:
def __init__(self, tasks: TasksResource) -> None:
@@ -332,13 +518,18 @@ def __init__(self, tasks: TasksResource) -> None:
self.delete = to_raw_response_wrapper(
tasks.delete,
)
+ self.delete_by_name = to_raw_response_wrapper(
+ tasks.delete_by_name,
+ )
+ self.retrieve_by_name = to_raw_response_wrapper(
+ tasks.retrieve_by_name,
+ )
self.stream_events = to_raw_response_wrapper(
tasks.stream_events,
)
-
- @cached_property
- def name(self) -> NameResourceWithRawResponse:
- return NameResourceWithRawResponse(self._tasks.name)
+ self.stream_events_by_name = to_raw_response_wrapper(
+ tasks.stream_events_by_name,
+ )
class AsyncTasksResourceWithRawResponse:
@@ -354,13 +545,18 @@ def __init__(self, tasks: AsyncTasksResource) -> None:
self.delete = async_to_raw_response_wrapper(
tasks.delete,
)
+ self.delete_by_name = async_to_raw_response_wrapper(
+ tasks.delete_by_name,
+ )
+ self.retrieve_by_name = async_to_raw_response_wrapper(
+ tasks.retrieve_by_name,
+ )
self.stream_events = async_to_raw_response_wrapper(
tasks.stream_events,
)
-
- @cached_property
- def name(self) -> AsyncNameResourceWithRawResponse:
- return AsyncNameResourceWithRawResponse(self._tasks.name)
+ self.stream_events_by_name = async_to_raw_response_wrapper(
+ tasks.stream_events_by_name,
+ )
class TasksResourceWithStreamingResponse:
@@ -376,13 +572,18 @@ def __init__(self, tasks: TasksResource) -> None:
self.delete = to_streamed_response_wrapper(
tasks.delete,
)
+ self.delete_by_name = to_streamed_response_wrapper(
+ tasks.delete_by_name,
+ )
+ self.retrieve_by_name = to_streamed_response_wrapper(
+ tasks.retrieve_by_name,
+ )
self.stream_events = to_streamed_response_wrapper(
tasks.stream_events,
)
-
- @cached_property
- def name(self) -> NameResourceWithStreamingResponse:
- return NameResourceWithStreamingResponse(self._tasks.name)
+ self.stream_events_by_name = to_streamed_response_wrapper(
+ tasks.stream_events_by_name,
+ )
class AsyncTasksResourceWithStreamingResponse:
@@ -398,10 +599,15 @@ def __init__(self, tasks: AsyncTasksResource) -> None:
self.delete = async_to_streamed_response_wrapper(
tasks.delete,
)
+ self.delete_by_name = async_to_streamed_response_wrapper(
+ tasks.delete_by_name,
+ )
+ self.retrieve_by_name = async_to_streamed_response_wrapper(
+ tasks.retrieve_by_name,
+ )
self.stream_events = async_to_streamed_response_wrapper(
tasks.stream_events,
)
-
- @cached_property
- def name(self) -> AsyncNameResourceWithStreamingResponse:
- return AsyncNameResourceWithStreamingResponse(self._tasks.name)
+ self.stream_events_by_name = async_to_streamed_response_wrapper(
+ tasks.stream_events_by_name,
+ )
diff --git a/src/agentex/resources/tasks/__init__.py b/src/agentex/resources/tasks/__init__.py
deleted file mode 100644
index 5a9c81b08..000000000
--- a/src/agentex/resources/tasks/__init__.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from .name import (
- NameResource,
- AsyncNameResource,
- NameResourceWithRawResponse,
- AsyncNameResourceWithRawResponse,
- NameResourceWithStreamingResponse,
- AsyncNameResourceWithStreamingResponse,
-)
-from .tasks import (
- TasksResource,
- AsyncTasksResource,
- TasksResourceWithRawResponse,
- AsyncTasksResourceWithRawResponse,
- TasksResourceWithStreamingResponse,
- AsyncTasksResourceWithStreamingResponse,
-)
-
-__all__ = [
- "NameResource",
- "AsyncNameResource",
- "NameResourceWithRawResponse",
- "AsyncNameResourceWithRawResponse",
- "NameResourceWithStreamingResponse",
- "AsyncNameResourceWithStreamingResponse",
- "TasksResource",
- "AsyncTasksResource",
- "TasksResourceWithRawResponse",
- "AsyncTasksResourceWithRawResponse",
- "TasksResourceWithStreamingResponse",
- "AsyncTasksResourceWithStreamingResponse",
-]
diff --git a/src/agentex/resources/tasks/name.py b/src/agentex/resources/tasks/name.py
deleted file mode 100644
index 7cef42eb4..000000000
--- a/src/agentex/resources/tasks/name.py
+++ /dev/null
@@ -1,324 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-import httpx
-
-from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
-from ..._compat import cached_property
-from ..._resource import SyncAPIResource, AsyncAPIResource
-from ..._response import (
- to_raw_response_wrapper,
- to_streamed_response_wrapper,
- async_to_raw_response_wrapper,
- async_to_streamed_response_wrapper,
-)
-from ..._streaming import Stream, AsyncStream
-from ...types.task import Task
-from ..._base_client import make_request_options
-
-__all__ = ["NameResource", "AsyncNameResource"]
-
-
-class NameResource(SyncAPIResource):
- @cached_property
- def with_raw_response(self) -> NameResourceWithRawResponse:
- """
- This property can be used as a prefix for any HTTP method call to return
- the raw response object instead of the parsed content.
-
- For more information, see https://www.github.com/scaleapi/agentex-python#accessing-raw-response-data-eg-headers
- """
- return NameResourceWithRawResponse(self)
-
- @cached_property
- def with_streaming_response(self) -> NameResourceWithStreamingResponse:
- """
- An alternative to `.with_raw_response` that doesn't eagerly read the response body.
-
- For more information, see https://www.github.com/scaleapi/agentex-python#with_streaming_response
- """
- return NameResourceWithStreamingResponse(self)
-
- def retrieve(
- self,
- task_name: str,
- *,
- # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
- # The extra values given here take precedence over values defined on the client or passed to this method.
- extra_headers: Headers | None = None,
- extra_query: Query | None = None,
- extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> Task:
- """
- Get a task by its unique name.
-
- Args:
- extra_headers: Send extra headers
-
- extra_query: Add additional query parameters to the request
-
- extra_body: Add additional JSON properties to the request
-
- timeout: Override the client-level default timeout for this request, in seconds
- """
- if not task_name:
- raise ValueError(f"Expected a non-empty value for `task_name` but received {task_name!r}")
- return self._get(
- f"/tasks/name/{task_name}",
- options=make_request_options(
- extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
- ),
- cast_to=Task,
- )
-
- def delete(
- self,
- task_name: str,
- *,
- # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
- # The extra values given here take precedence over values defined on the client or passed to this method.
- extra_headers: Headers | None = None,
- extra_query: Query | None = None,
- extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> Task:
- """
- Delete a task by its unique name.
-
- Args:
- extra_headers: Send extra headers
-
- extra_query: Add additional query parameters to the request
-
- extra_body: Add additional JSON properties to the request
-
- timeout: Override the client-level default timeout for this request, in seconds
- """
- if not task_name:
- raise ValueError(f"Expected a non-empty value for `task_name` but received {task_name!r}")
- return self._delete(
- f"/tasks/name/{task_name}",
- options=make_request_options(
- extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
- ),
- cast_to=Task,
- )
-
- def stream_events(
- self,
- task_name: str,
- *,
- # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
- # The extra values given here take precedence over values defined on the client or passed to this method.
- extra_headers: Headers | None = None,
- extra_query: Query | None = None,
- extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> Stream[object]:
- """
- Stream events for a task by its unique name.
-
- Args:
- extra_headers: Send extra headers
-
- extra_query: Add additional query parameters to the request
-
- extra_body: Add additional JSON properties to the request
-
- timeout: Override the client-level default timeout for this request, in seconds
- """
- if not task_name:
- raise ValueError(f"Expected a non-empty value for `task_name` but received {task_name!r}")
- return self._get(
- f"/tasks/name/{task_name}/stream",
- options=make_request_options(
- extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
- ),
- cast_to=object,
- stream=True,
- stream_cls=Stream[object],
- )
-
-
-class AsyncNameResource(AsyncAPIResource):
- @cached_property
- def with_raw_response(self) -> AsyncNameResourceWithRawResponse:
- """
- This property can be used as a prefix for any HTTP method call to return
- the raw response object instead of the parsed content.
-
- For more information, see https://www.github.com/scaleapi/agentex-python#accessing-raw-response-data-eg-headers
- """
- return AsyncNameResourceWithRawResponse(self)
-
- @cached_property
- def with_streaming_response(self) -> AsyncNameResourceWithStreamingResponse:
- """
- An alternative to `.with_raw_response` that doesn't eagerly read the response body.
-
- For more information, see https://www.github.com/scaleapi/agentex-python#with_streaming_response
- """
- return AsyncNameResourceWithStreamingResponse(self)
-
- async def retrieve(
- self,
- task_name: str,
- *,
- # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
- # The extra values given here take precedence over values defined on the client or passed to this method.
- extra_headers: Headers | None = None,
- extra_query: Query | None = None,
- extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> Task:
- """
- Get a task by its unique name.
-
- Args:
- extra_headers: Send extra headers
-
- extra_query: Add additional query parameters to the request
-
- extra_body: Add additional JSON properties to the request
-
- timeout: Override the client-level default timeout for this request, in seconds
- """
- if not task_name:
- raise ValueError(f"Expected a non-empty value for `task_name` but received {task_name!r}")
- return await self._get(
- f"/tasks/name/{task_name}",
- options=make_request_options(
- extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
- ),
- cast_to=Task,
- )
-
- async def delete(
- self,
- task_name: str,
- *,
- # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
- # The extra values given here take precedence over values defined on the client or passed to this method.
- extra_headers: Headers | None = None,
- extra_query: Query | None = None,
- extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> Task:
- """
- Delete a task by its unique name.
-
- Args:
- extra_headers: Send extra headers
-
- extra_query: Add additional query parameters to the request
-
- extra_body: Add additional JSON properties to the request
-
- timeout: Override the client-level default timeout for this request, in seconds
- """
- if not task_name:
- raise ValueError(f"Expected a non-empty value for `task_name` but received {task_name!r}")
- return await self._delete(
- f"/tasks/name/{task_name}",
- options=make_request_options(
- extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
- ),
- cast_to=Task,
- )
-
- async def stream_events(
- self,
- task_name: str,
- *,
- # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
- # The extra values given here take precedence over values defined on the client or passed to this method.
- extra_headers: Headers | None = None,
- extra_query: Query | None = None,
- extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> AsyncStream[object]:
- """
- Stream events for a task by its unique name.
-
- Args:
- extra_headers: Send extra headers
-
- extra_query: Add additional query parameters to the request
-
- extra_body: Add additional JSON properties to the request
-
- timeout: Override the client-level default timeout for this request, in seconds
- """
- if not task_name:
- raise ValueError(f"Expected a non-empty value for `task_name` but received {task_name!r}")
- return await self._get(
- f"/tasks/name/{task_name}/stream",
- options=make_request_options(
- extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
- ),
- cast_to=object,
- stream=True,
- stream_cls=AsyncStream[object],
- )
-
-
-class NameResourceWithRawResponse:
- def __init__(self, name: NameResource) -> None:
- self._name = name
-
- self.retrieve = to_raw_response_wrapper(
- name.retrieve,
- )
- self.delete = to_raw_response_wrapper(
- name.delete,
- )
- self.stream_events = to_raw_response_wrapper(
- name.stream_events,
- )
-
-
-class AsyncNameResourceWithRawResponse:
- def __init__(self, name: AsyncNameResource) -> None:
- self._name = name
-
- self.retrieve = async_to_raw_response_wrapper(
- name.retrieve,
- )
- self.delete = async_to_raw_response_wrapper(
- name.delete,
- )
- self.stream_events = async_to_raw_response_wrapper(
- name.stream_events,
- )
-
-
-class NameResourceWithStreamingResponse:
- def __init__(self, name: NameResource) -> None:
- self._name = name
-
- self.retrieve = to_streamed_response_wrapper(
- name.retrieve,
- )
- self.delete = to_streamed_response_wrapper(
- name.delete,
- )
- self.stream_events = to_streamed_response_wrapper(
- name.stream_events,
- )
-
-
-class AsyncNameResourceWithStreamingResponse:
- def __init__(self, name: AsyncNameResource) -> None:
- self._name = name
-
- self.retrieve = async_to_streamed_response_wrapper(
- name.retrieve,
- )
- self.delete = async_to_streamed_response_wrapper(
- name.delete,
- )
- self.stream_events = async_to_streamed_response_wrapper(
- name.stream_events,
- )
diff --git a/src/agentex/types/__init__.py b/src/agentex/types/__init__.py
index 17dc3aa2e..26d1ecc8e 100644
--- a/src/agentex/types/__init__.py
+++ b/src/agentex/types/__init__.py
@@ -8,32 +8,38 @@
from .event import Event as Event
from .state import State as State
from .acp_type import AcpType as AcpType
+from .data_delta import DataDelta as DataDelta
+from .text_delta import TextDelta as TextDelta
from .data_content import DataContent as DataContent
from .task_message import TaskMessage as TaskMessage
from .text_content import TextContent as TextContent
from .message_style import MessageStyle as MessageStyle
from .message_author import MessageAuthor as MessageAuthor
from .agent_rpc_params import AgentRpcParams as AgentRpcParams
-from .echo_send_params import EchoSendParams as EchoSendParams
+from .agent_rpc_result import AgentRpcResult as AgentRpcResult
from .span_list_params import SpanListParams as SpanListParams
-from .streaming_status import StreamingStatus as StreamingStatus
from .agent_list_params import AgentListParams as AgentListParams
from .event_list_params import EventListParams as EventListParams
from .state_list_params import StateListParams as StateListParams
+from .agent_rpc_response import AgentRpcResponse as AgentRpcResponse
from .agent_task_tracker import AgentTaskTracker as AgentTaskTracker
from .data_content_param import DataContentParam as DataContentParam
from .span_create_params import SpanCreateParams as SpanCreateParams
from .span_list_response import SpanListResponse as SpanListResponse
from .span_update_params import SpanUpdateParams as SpanUpdateParams
from .task_list_response import TaskListResponse as TaskListResponse
+from .task_message_delta import TaskMessageDelta as TaskMessageDelta
from .text_content_param import TextContentParam as TextContentParam
+from .tool_request_delta import ToolRequestDelta as ToolRequestDelta
from .agent_list_response import AgentListResponse as AgentListResponse
from .event_list_response import EventListResponse as EventListResponse
from .message_list_params import MessageListParams as MessageListParams
from .state_create_params import StateCreateParams as StateCreateParams
from .state_list_response import StateListResponse as StateListResponse
from .state_update_params import StateUpdateParams as StateUpdateParams
+from .tool_response_delta import ToolResponseDelta as ToolResponseDelta
from .tracker_list_params import TrackerListParams as TrackerListParams
+from .task_message_content import TaskMessageContent as TaskMessageContent
from .tool_request_content import ToolRequestContent as ToolRequestContent
from .message_create_params import MessageCreateParams as MessageCreateParams
from .message_list_response import MessageListResponse as MessageListResponse
@@ -41,5 +47,7 @@
from .tool_response_content import ToolResponseContent as ToolResponseContent
from .tracker_list_response import TrackerListResponse as TrackerListResponse
from .tracker_update_params import TrackerUpdateParams as TrackerUpdateParams
+from .agent_rpc_by_name_params import AgentRpcByNameParams as AgentRpcByNameParams
+from .task_message_content_param import TaskMessageContentParam as TaskMessageContentParam
from .tool_request_content_param import ToolRequestContentParam as ToolRequestContentParam
from .tool_response_content_param import ToolResponseContentParam as ToolResponseContentParam
diff --git a/src/agentex/types/agent_rpc_by_name_params.py b/src/agentex/types/agent_rpc_by_name_params.py
new file mode 100644
index 000000000..5be189d7a
--- /dev/null
+++ b/src/agentex/types/agent_rpc_by_name_params.py
@@ -0,0 +1,21 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing import Union
+from typing_extensions import Literal, Required, TypedDict
+
+from .agent_rpc_params import AgentRpcParams
+
+__all__ = ["AgentRpcByNameParams"]
+
+
+class AgentRpcByNameParams(TypedDict, total=False):
+ method: Required[Literal["event/send", "task/create", "message/send", "task/cancel"]]
+
+ params: Required[AgentRpcParams]
+ """The parameters for the agent RPC request"""
+
+ id: Union[int, str, None]
+
+ jsonrpc: Literal["2.0"]
diff --git a/src/agentex/types/agent_rpc_params.py b/src/agentex/types/agent_rpc_params.py
index 2c79a7a90..5082b18cb 100644
--- a/src/agentex/types/agent_rpc_params.py
+++ b/src/agentex/types/agent_rpc_params.py
@@ -3,36 +3,14 @@
from __future__ import annotations
from typing import Dict, Union, Optional
-from typing_extensions import Literal, Required, TypeAlias, TypedDict
+from typing_extensions import Required, TypeAlias, TypedDict
-from .data_content_param import DataContentParam
-from .text_content_param import TextContentParam
-from .tool_request_content_param import ToolRequestContentParam
-from .tool_response_content_param import ToolResponseContentParam
+from .task_message_content_param import TaskMessageContentParam
-__all__ = [
- "AgentRpcParams",
- "Params",
- "ParamsCreateTaskRequest",
- "ParamsCancelTaskRequest",
- "ParamsSendMessageRequest",
- "ParamsSendMessageRequestContent",
- "ParamsSendEventRequest",
- "ParamsSendEventRequestContent",
-]
+__all__ = ["AgentRpcParams", "CreateTaskRequest", "CancelTaskRequest", "SendMessageRequest", "SendEventRequest"]
-class AgentRpcParams(TypedDict, total=False):
- method: Required[Literal["event/send", "task/create", "message/send", "task/cancel"]]
-
- params: Required[Params]
-
- id: Union[int, str, None]
-
- jsonrpc: Literal["2.0"]
-
-
-class ParamsCreateTaskRequest(TypedDict, total=False):
+class CreateTaskRequest(TypedDict, total=False):
name: Optional[str]
"""The name of the task to create"""
@@ -40,7 +18,7 @@ class ParamsCreateTaskRequest(TypedDict, total=False):
"""The parameters for the task"""
-class ParamsCancelTaskRequest(TypedDict, total=False):
+class CancelTaskRequest(TypedDict, total=False):
task_id: Optional[str]
"""The ID of the task to cancel. Either this or task_name must be provided."""
@@ -48,13 +26,8 @@ class ParamsCancelTaskRequest(TypedDict, total=False):
"""The name of the task to cancel. Either this or task_id must be provided."""
-ParamsSendMessageRequestContent: TypeAlias = Union[
- TextContentParam, DataContentParam, ToolRequestContentParam, ToolResponseContentParam
-]
-
-
-class ParamsSendMessageRequest(TypedDict, total=False):
- content: Required[ParamsSendMessageRequestContent]
+class SendMessageRequest(TypedDict, total=False):
+ content: Required[TaskMessageContentParam]
"""The message that was sent to the agent"""
stream: bool
@@ -64,13 +37,8 @@ class ParamsSendMessageRequest(TypedDict, total=False):
"""The ID of the task that the message was sent to"""
-ParamsSendEventRequestContent: TypeAlias = Union[
- TextContentParam, DataContentParam, ToolRequestContentParam, ToolResponseContentParam
-]
-
-
-class ParamsSendEventRequest(TypedDict, total=False):
- content: Optional[ParamsSendEventRequestContent]
+class SendEventRequest(TypedDict, total=False):
+ content: Optional[TaskMessageContentParam]
"""The content to send to the event"""
task_id: Optional[str]
@@ -80,6 +48,4 @@ class ParamsSendEventRequest(TypedDict, total=False):
"""The name of the task that the event was sent to"""
-Params: TypeAlias = Union[
- ParamsCreateTaskRequest, ParamsCancelTaskRequest, ParamsSendMessageRequest, ParamsSendEventRequest
-]
+AgentRpcParams: TypeAlias = Union[CreateTaskRequest, CancelTaskRequest, SendMessageRequest, SendEventRequest]
diff --git a/src/agentex/types/agent_rpc_params1.py b/src/agentex/types/agent_rpc_params1.py
new file mode 100644
index 000000000..3eae8b683
--- /dev/null
+++ b/src/agentex/types/agent_rpc_params1.py
@@ -0,0 +1,21 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing import Union
+from typing_extensions import Literal, Required, TypedDict
+
+from . import agent_rpc_params
+
+__all__ = ["AgentRpcParams"]
+
+
+class AgentRpcParams(TypedDict, total=False):
+ method: Required[Literal["event/send", "task/create", "message/send", "task/cancel"]]
+
+ params: Required[agent_rpc_params.AgentRpcParams]
+ """The parameters for the agent RPC request"""
+
+ id: Union[int, str, None]
+
+ jsonrpc: Literal["2.0"]
diff --git a/src/agentex/types/agent_rpc_response.py b/src/agentex/types/agent_rpc_response.py
new file mode 100644
index 000000000..e9995e801
--- /dev/null
+++ b/src/agentex/types/agent_rpc_response.py
@@ -0,0 +1,20 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Union, Optional
+from typing_extensions import Literal
+
+from .._models import BaseModel
+from .agent_rpc_result import AgentRpcResult
+
+__all__ = ["AgentRpcResponse"]
+
+
+class AgentRpcResponse(BaseModel):
+ result: Optional[AgentRpcResult] = None
+ """The result of the agent RPC request"""
+
+ id: Union[int, str, None] = None
+
+ error: Optional[object] = None
+
+ jsonrpc: Optional[Literal["2.0"]] = None
diff --git a/src/agentex/types/agent_rpc_result.py b/src/agentex/types/agent_rpc_result.py
new file mode 100644
index 000000000..63e30a5df
--- /dev/null
+++ b/src/agentex/types/agent_rpc_result.py
@@ -0,0 +1,90 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import List, Union, Optional
+from typing_extensions import Literal, TypeAlias
+
+from .task import Task
+from .event import Event
+from .._models import BaseModel
+from .task_message import TaskMessage
+from .task_message_delta import TaskMessageDelta
+from .task_message_content import TaskMessageContent
+
+__all__ = [
+ "AgentRpcResult",
+ "StreamTaskMessageStart",
+ "StreamTaskMessageDelta",
+ "StreamTaskMessageFull",
+ "StreamTaskMessageDone",
+]
+
+
+class StreamTaskMessageStart(BaseModel):
+ content: TaskMessageContent
+
+ index: Optional[int] = None
+
+ parent_task_message: Optional[TaskMessage] = None
+ """Represents a message in the agent system.
+
+ This entity is used to store messages in MongoDB, with each message associated
+ with a specific task.
+ """
+
+ type: Optional[Literal["start"]] = None
+
+
+class StreamTaskMessageDelta(BaseModel):
+ delta: Optional[TaskMessageDelta] = None
+ """Delta for text updates"""
+
+ index: Optional[int] = None
+
+ parent_task_message: Optional[TaskMessage] = None
+ """Represents a message in the agent system.
+
+ This entity is used to store messages in MongoDB, with each message associated
+ with a specific task.
+ """
+
+ type: Optional[Literal["delta"]] = None
+
+
+class StreamTaskMessageFull(BaseModel):
+ content: TaskMessageContent
+
+ index: Optional[int] = None
+
+ parent_task_message: Optional[TaskMessage] = None
+ """Represents a message in the agent system.
+
+ This entity is used to store messages in MongoDB, with each message associated
+ with a specific task.
+ """
+
+ type: Optional[Literal["full"]] = None
+
+
+class StreamTaskMessageDone(BaseModel):
+ index: Optional[int] = None
+
+ parent_task_message: Optional[TaskMessage] = None
+ """Represents a message in the agent system.
+
+ This entity is used to store messages in MongoDB, with each message associated
+ with a specific task.
+ """
+
+ type: Optional[Literal["done"]] = None
+
+
+AgentRpcResult: TypeAlias = Union[
+ List[TaskMessage],
+ StreamTaskMessageStart,
+ StreamTaskMessageDelta,
+ StreamTaskMessageFull,
+ StreamTaskMessageDone,
+ Task,
+ Event,
+ None,
+]
diff --git a/src/agentex/types/agents/__init__.py b/src/agentex/types/agents/__init__.py
deleted file mode 100644
index 6b2986a26..000000000
--- a/src/agentex/types/agents/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-from .name_rpc_params import NameRpcParams as NameRpcParams
diff --git a/src/agentex/types/agents/name_rpc_params.py b/src/agentex/types/agents/name_rpc_params.py
deleted file mode 100644
index 6a68b0ba6..000000000
--- a/src/agentex/types/agents/name_rpc_params.py
+++ /dev/null
@@ -1,85 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-from typing import Dict, Union, Optional
-from typing_extensions import Literal, Required, TypeAlias, TypedDict
-
-from ..data_content_param import DataContentParam
-from ..text_content_param import TextContentParam
-from ..tool_request_content_param import ToolRequestContentParam
-from ..tool_response_content_param import ToolResponseContentParam
-
-__all__ = [
- "NameRpcParams",
- "Params",
- "ParamsCreateTaskRequest",
- "ParamsCancelTaskRequest",
- "ParamsSendMessageRequest",
- "ParamsSendMessageRequestContent",
- "ParamsSendEventRequest",
- "ParamsSendEventRequestContent",
-]
-
-
-class NameRpcParams(TypedDict, total=False):
- method: Required[Literal["event/send", "task/create", "message/send", "task/cancel"]]
-
- params: Required[Params]
-
- id: Union[int, str, None]
-
- jsonrpc: Literal["2.0"]
-
-
-class ParamsCreateTaskRequest(TypedDict, total=False):
- name: Optional[str]
- """The name of the task to create"""
-
- params: Optional[Dict[str, object]]
- """The parameters for the task"""
-
-
-class ParamsCancelTaskRequest(TypedDict, total=False):
- task_id: Optional[str]
- """The ID of the task to cancel. Either this or task_name must be provided."""
-
- task_name: Optional[str]
- """The name of the task to cancel. Either this or task_id must be provided."""
-
-
-ParamsSendMessageRequestContent: TypeAlias = Union[
- TextContentParam, DataContentParam, ToolRequestContentParam, ToolResponseContentParam
-]
-
-
-class ParamsSendMessageRequest(TypedDict, total=False):
- content: Required[ParamsSendMessageRequestContent]
- """The message that was sent to the agent"""
-
- stream: bool
- """Whether to stream the response message back to the client"""
-
- task_id: Optional[str]
- """The ID of the task that the message was sent to"""
-
-
-ParamsSendEventRequestContent: TypeAlias = Union[
- TextContentParam, DataContentParam, ToolRequestContentParam, ToolResponseContentParam
-]
-
-
-class ParamsSendEventRequest(TypedDict, total=False):
- content: Optional[ParamsSendEventRequestContent]
- """The content to send to the event"""
-
- task_id: Optional[str]
- """The ID of the task that the event was sent to"""
-
- task_name: Optional[str]
- """The name of the task that the event was sent to"""
-
-
-Params: TypeAlias = Union[
- ParamsCreateTaskRequest, ParamsCancelTaskRequest, ParamsSendMessageRequest, ParamsSendEventRequest
-]
diff --git a/src/agentex/types/data_content.py b/src/agentex/types/data_content.py
index 2ed340454..5cb479921 100644
--- a/src/agentex/types/data_content.py
+++ b/src/agentex/types/data_content.py
@@ -20,11 +20,11 @@ class DataContent(BaseModel):
data: Dict[str, object]
"""The contents of the data message."""
- style: Optional[MessageStyle] = None
+ style: MessageStyle = "static"
"""The style of the message.
This is used by the client to determine how to display the message.
"""
- type: Optional[Literal["data"]] = None
+ type: Literal["data"] = "data"
"""The type of the message, in this case `data`."""
diff --git a/src/agentex/types/data_delta.py b/src/agentex/types/data_delta.py
new file mode 100644
index 000000000..e30c12e71
--- /dev/null
+++ b/src/agentex/types/data_delta.py
@@ -0,0 +1,14 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Optional
+from typing_extensions import Literal
+
+from .._models import BaseModel
+
+__all__ = ["DataDelta"]
+
+
+class DataDelta(BaseModel):
+ data_delta: Optional[str] = None
+
+ type: Optional[Literal["data"]] = None
diff --git a/src/agentex/types/echo_send_params.py b/src/agentex/types/echo_send_params.py
deleted file mode 100644
index 3e237817e..000000000
--- a/src/agentex/types/echo_send_params.py
+++ /dev/null
@@ -1,11 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-from typing_extensions import Required, TypedDict
-
-__all__ = ["EchoSendParams"]
-
-
-class EchoSendParams(TypedDict, total=False):
- message: Required[str]
diff --git a/src/agentex/types/event.py b/src/agentex/types/event.py
index 3f1e5997d..9a544cb66 100644
--- a/src/agentex/types/event.py
+++ b/src/agentex/types/event.py
@@ -1,21 +1,12 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-from typing import Union, Optional
+from typing import Optional
from datetime import datetime
-from typing_extensions import Annotated, TypeAlias
-from .._utils import PropertyInfo
from .._models import BaseModel
-from .data_content import DataContent
-from .text_content import TextContent
-from .tool_request_content import ToolRequestContent
-from .tool_response_content import ToolResponseContent
+from .task_message_content import TaskMessageContent
-__all__ = ["Event", "Content"]
-
-Content: TypeAlias = Annotated[
- Union[TextContent, DataContent, ToolRequestContent, ToolResponseContent, None], PropertyInfo(discriminator="type")
-]
+__all__ = ["Event"]
class Event(BaseModel):
@@ -31,7 +22,7 @@ class Event(BaseModel):
task_id: str
"""The UUID of the task that the event belongs to"""
- content: Optional[Content] = None
+ content: Optional[TaskMessageContent] = None
"""The content of the event"""
created_at: Optional[datetime] = None
diff --git a/src/agentex/types/message_create_params.py b/src/agentex/types/message_create_params.py
index 06ec3727a..787715ec6 100644
--- a/src/agentex/types/message_create_params.py
+++ b/src/agentex/types/message_create_params.py
@@ -2,24 +2,17 @@
from __future__ import annotations
-from typing import Union, Optional
-from typing_extensions import Required, TypeAlias, TypedDict
+from typing import Optional
+from typing_extensions import Literal, Required, TypedDict
-from .streaming_status import StreamingStatus
-from .data_content_param import DataContentParam
-from .text_content_param import TextContentParam
-from .tool_request_content_param import ToolRequestContentParam
-from .tool_response_content_param import ToolResponseContentParam
+from .task_message_content_param import TaskMessageContentParam
-__all__ = ["MessageCreateParams", "Content"]
+__all__ = ["MessageCreateParams"]
class MessageCreateParams(TypedDict, total=False):
- content: Required[Content]
+ content: Required[TaskMessageContentParam]
task_id: Required[str]
- streaming_status: Optional[StreamingStatus]
-
-
-Content: TypeAlias = Union[TextContentParam, DataContentParam, ToolRequestContentParam, ToolResponseContentParam]
+ streaming_status: Optional[Literal["IN_PROGRESS", "DONE"]]
diff --git a/src/agentex/types/message_update_params.py b/src/agentex/types/message_update_params.py
index bc5305b8e..ea3dbaf89 100644
--- a/src/agentex/types/message_update_params.py
+++ b/src/agentex/types/message_update_params.py
@@ -2,24 +2,17 @@
from __future__ import annotations
-from typing import Union, Optional
-from typing_extensions import Required, TypeAlias, TypedDict
+from typing import Optional
+from typing_extensions import Literal, Required, TypedDict
-from .streaming_status import StreamingStatus
-from .data_content_param import DataContentParam
-from .text_content_param import TextContentParam
-from .tool_request_content_param import ToolRequestContentParam
-from .tool_response_content_param import ToolResponseContentParam
+from .task_message_content_param import TaskMessageContentParam
-__all__ = ["MessageUpdateParams", "Content"]
+__all__ = ["MessageUpdateParams"]
class MessageUpdateParams(TypedDict, total=False):
- content: Required[Content]
+ content: Required[TaskMessageContentParam]
task_id: Required[str]
- streaming_status: Optional[StreamingStatus]
-
-
-Content: TypeAlias = Union[TextContentParam, DataContentParam, ToolRequestContentParam, ToolResponseContentParam]
+ streaming_status: Optional[Literal["IN_PROGRESS", "DONE"]]
diff --git a/src/agentex/types/messages/batch_create_params.py b/src/agentex/types/messages/batch_create_params.py
index c09c69a9e..0a0153aea 100644
--- a/src/agentex/types/messages/batch_create_params.py
+++ b/src/agentex/types/messages/batch_create_params.py
@@ -2,21 +2,15 @@
from __future__ import annotations
-from typing import Union, Iterable
-from typing_extensions import Required, TypeAlias, TypedDict
+from typing import Iterable
+from typing_extensions import Required, TypedDict
-from ..data_content_param import DataContentParam
-from ..text_content_param import TextContentParam
-from ..tool_request_content_param import ToolRequestContentParam
-from ..tool_response_content_param import ToolResponseContentParam
+from ..task_message_content_param import TaskMessageContentParam
-__all__ = ["BatchCreateParams", "Content"]
+__all__ = ["BatchCreateParams"]
class BatchCreateParams(TypedDict, total=False):
- contents: Required[Iterable[Content]]
+ contents: Required[Iterable[TaskMessageContentParam]]
task_id: Required[str]
-
-
-Content: TypeAlias = Union[TextContentParam, DataContentParam, ToolRequestContentParam, ToolResponseContentParam]
diff --git a/src/agentex/types/messages/batch_update_params.py b/src/agentex/types/messages/batch_update_params.py
index 68f24f8d7..c25e46f6c 100644
--- a/src/agentex/types/messages/batch_update_params.py
+++ b/src/agentex/types/messages/batch_update_params.py
@@ -2,21 +2,15 @@
from __future__ import annotations
-from typing import Dict, Union
-from typing_extensions import Required, TypeAlias, TypedDict
+from typing import Dict
+from typing_extensions import Required, TypedDict
-from ..data_content_param import DataContentParam
-from ..text_content_param import TextContentParam
-from ..tool_request_content_param import ToolRequestContentParam
-from ..tool_response_content_param import ToolResponseContentParam
+from ..task_message_content_param import TaskMessageContentParam
-__all__ = ["BatchUpdateParams", "Updates"]
+__all__ = ["BatchUpdateParams"]
class BatchUpdateParams(TypedDict, total=False):
task_id: Required[str]
- updates: Required[Dict[str, Updates]]
-
-
-Updates: TypeAlias = Union[TextContentParam, DataContentParam, ToolRequestContentParam, ToolResponseContentParam]
+ updates: Required[Dict[str, TaskMessageContentParam]]
diff --git a/src/agentex/types/streaming_status.py b/src/agentex/types/streaming_status.py
deleted file mode 100644
index 196d78815..000000000
--- a/src/agentex/types/streaming_status.py
+++ /dev/null
@@ -1,7 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from typing_extensions import Literal, TypeAlias
-
-__all__ = ["StreamingStatus"]
-
-StreamingStatus: TypeAlias = Literal["IN_PROGRESS", "DONE"]
diff --git a/src/agentex/types/task_message.py b/src/agentex/types/task_message.py
index 686ade7d6..b5938cc8e 100644
--- a/src/agentex/types/task_message.py
+++ b/src/agentex/types/task_message.py
@@ -1,36 +1,33 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-from typing import Union, Optional
+from typing import Optional
from datetime import datetime
-from typing_extensions import Annotated, TypeAlias
+from typing_extensions import Literal
-from .._utils import PropertyInfo
from .._models import BaseModel
-from .data_content import DataContent
-from .text_content import TextContent
-from .streaming_status import StreamingStatus
-from .tool_request_content import ToolRequestContent
-from .tool_response_content import ToolResponseContent
+from .task_message_content import TaskMessageContent
-__all__ = ["TaskMessage", "Content"]
-
-Content: TypeAlias = Annotated[
- Union[TextContent, DataContent, ToolRequestContent, ToolResponseContent], PropertyInfo(discriminator="type")
-]
+__all__ = ["TaskMessage"]
class TaskMessage(BaseModel):
- id: str
- """The task message's unique id"""
+ content: TaskMessageContent
+ """The content of the message.
- content: Content
-
- created_at: datetime
- """The timestamp when the message was created"""
+ This content is not OpenAI compatible. These are messages that are meant to be
+ displayed to the user.
+ """
task_id: str
+ """ID of the task this message belongs to"""
+
+ id: Optional[str] = None
+ """The task message's unique id"""
+
+ created_at: Optional[datetime] = None
+ """The timestamp when the message was created"""
- streaming_status: Optional[StreamingStatus] = None
+ streaming_status: Optional[Literal["IN_PROGRESS", "DONE"]] = None
updated_at: Optional[datetime] = None
"""The timestamp when the message was last updated"""
diff --git a/src/agentex/types/task_message_content.py b/src/agentex/types/task_message_content.py
new file mode 100644
index 000000000..126992b40
--- /dev/null
+++ b/src/agentex/types/task_message_content.py
@@ -0,0 +1,16 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Union
+from typing_extensions import Annotated, TypeAlias
+
+from .._utils import PropertyInfo
+from .data_content import DataContent
+from .text_content import TextContent
+from .tool_request_content import ToolRequestContent
+from .tool_response_content import ToolResponseContent
+
+__all__ = ["TaskMessageContent"]
+
+TaskMessageContent: TypeAlias = Annotated[
+ Union[TextContent, DataContent, ToolRequestContent, ToolResponseContent], PropertyInfo(discriminator="type")
+]
diff --git a/src/agentex/types/task_message_content_param.py b/src/agentex/types/task_message_content_param.py
new file mode 100644
index 000000000..6991c64e7
--- /dev/null
+++ b/src/agentex/types/task_message_content_param.py
@@ -0,0 +1,17 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing import Union
+from typing_extensions import TypeAlias
+
+from .data_content_param import DataContentParam
+from .text_content_param import TextContentParam
+from .tool_request_content_param import ToolRequestContentParam
+from .tool_response_content_param import ToolResponseContentParam
+
+__all__ = ["TaskMessageContentParam"]
+
+TaskMessageContentParam: TypeAlias = Union[
+ TextContentParam, DataContentParam, ToolRequestContentParam, ToolResponseContentParam
+]
diff --git a/src/agentex/types/task_message_delta.py b/src/agentex/types/task_message_delta.py
new file mode 100644
index 000000000..5a477e5a3
--- /dev/null
+++ b/src/agentex/types/task_message_delta.py
@@ -0,0 +1,16 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Union
+from typing_extensions import Annotated, TypeAlias
+
+from .._utils import PropertyInfo
+from .data_delta import DataDelta
+from .text_delta import TextDelta
+from .tool_request_delta import ToolRequestDelta
+from .tool_response_delta import ToolResponseDelta
+
+__all__ = ["TaskMessageDelta"]
+
+TaskMessageDelta: TypeAlias = Annotated[
+ Union[TextDelta, DataDelta, ToolRequestDelta, ToolResponseDelta], PropertyInfo(discriminator="type")
+]
diff --git a/src/agentex/types/tasks/__init__.py b/src/agentex/types/tasks/__init__.py
deleted file mode 100644
index f8ee8b14b..000000000
--- a/src/agentex/types/tasks/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
diff --git a/src/agentex/types/text_content.py b/src/agentex/types/text_content.py
index c96c797f8..f4586698d 100644
--- a/src/agentex/types/text_content.py
+++ b/src/agentex/types/text_content.py
@@ -37,17 +37,17 @@ class TextContent(BaseModel):
attachments: Optional[List[Attachment]] = None
"""Optional list of file attachments with structured metadata."""
- format: Optional[Literal["markdown", "plain", "code"]] = None
+ format: Literal["markdown", "plain", "code"] = "plain"
"""The format of the message.
This is used by the client to determine how to display the message.
"""
- style: Optional[MessageStyle] = None
+ style: MessageStyle = "static"
"""The style of the message.
This is used by the client to determine how to display the message.
"""
- type: Optional[Literal["text"]] = None
+ type: Literal["text"] = "text"
"""The type of the message, in this case `text`."""
diff --git a/src/agentex/types/text_delta.py b/src/agentex/types/text_delta.py
new file mode 100644
index 000000000..29a60cfd2
--- /dev/null
+++ b/src/agentex/types/text_delta.py
@@ -0,0 +1,14 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Optional
+from typing_extensions import Literal
+
+from .._models import BaseModel
+
+__all__ = ["TextDelta"]
+
+
+class TextDelta(BaseModel):
+ text_delta: Optional[str] = None
+
+ type: Optional[Literal["text"]] = None
diff --git a/src/agentex/types/tool_request_content.py b/src/agentex/types/tool_request_content.py
index 66128630a..4fd52eb8f 100644
--- a/src/agentex/types/tool_request_content.py
+++ b/src/agentex/types/tool_request_content.py
@@ -26,11 +26,11 @@ class ToolRequestContent(BaseModel):
tool_call_id: str
"""The ID of the tool call that is being requested."""
- style: Optional[MessageStyle] = None
+ style: MessageStyle = "static"
"""The style of the message.
This is used by the client to determine how to display the message.
"""
- type: Optional[Literal["tool_request"]] = None
+ type: Literal["tool_request"] = "tool_request"
"""The type of the message, in this case `tool_request`."""
diff --git a/src/agentex/types/tool_request_delta.py b/src/agentex/types/tool_request_delta.py
new file mode 100644
index 000000000..7877dfdde
--- /dev/null
+++ b/src/agentex/types/tool_request_delta.py
@@ -0,0 +1,18 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Optional
+from typing_extensions import Literal
+
+from .._models import BaseModel
+
+__all__ = ["ToolRequestDelta"]
+
+
+class ToolRequestDelta(BaseModel):
+ name: str
+
+ tool_call_id: str
+
+ arguments_delta: Optional[str] = None
+
+ type: Optional[Literal["tool_request"]] = None
diff --git a/src/agentex/types/tool_response_content.py b/src/agentex/types/tool_response_content.py
index f6ba15b72..7f72443cc 100644
--- a/src/agentex/types/tool_response_content.py
+++ b/src/agentex/types/tool_response_content.py
@@ -26,11 +26,11 @@ class ToolResponseContent(BaseModel):
tool_call_id: str
"""The ID of the tool call that is being responded to."""
- style: Optional[MessageStyle] = None
+ style: MessageStyle = "static"
"""The style of the message.
This is used by the client to determine how to display the message.
"""
- type: Optional[Literal["tool_response"]] = None
+ type: Literal["tool_response"] = "tool_response"
"""The type of the message, in this case `tool_response`."""
diff --git a/src/agentex/types/tool_response_delta.py b/src/agentex/types/tool_response_delta.py
new file mode 100644
index 000000000..383770db9
--- /dev/null
+++ b/src/agentex/types/tool_response_delta.py
@@ -0,0 +1,18 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Optional
+from typing_extensions import Literal
+
+from .._models import BaseModel
+
+__all__ = ["ToolResponseDelta"]
+
+
+class ToolResponseDelta(BaseModel):
+ name: str
+
+ tool_call_id: str
+
+ content_delta: Optional[str] = None
+
+ type: Optional[Literal["tool_response"]] = None
diff --git a/src/agentex_sdk/lib/.keep b/src/agentex_sdk/lib/.keep
deleted file mode 100644
index 5e2c99fdb..000000000
--- a/src/agentex_sdk/lib/.keep
+++ /dev/null
@@ -1,4 +0,0 @@
-File generated from our OpenAPI spec by Stainless.
-
-This directory can be used to store custom files to expand the SDK.
-It is ignored by Stainless code generation and its content (other than this keep file) won't be touched.
\ No newline at end of file
diff --git a/tests/api_resources/agents/__init__.py b/tests/api_resources/agents/__init__.py
deleted file mode 100644
index fd8019a9a..000000000
--- a/tests/api_resources/agents/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
diff --git a/tests/api_resources/agents/test_name.py b/tests/api_resources/agents/test_name.py
deleted file mode 100644
index a16fe4477..000000000
--- a/tests/api_resources/agents/test_name.py
+++ /dev/null
@@ -1,322 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-import os
-from typing import Any, cast
-
-import pytest
-
-from agentex import Agentex, AsyncAgentex
-from tests.utils import assert_matches_type
-from agentex.types import Agent
-
-base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
-
-
-class TestName:
- parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
-
- @pytest.mark.skip()
- @parametrize
- def test_method_retrieve(self, client: Agentex) -> None:
- name = client.agents.name.retrieve(
- "agent_name",
- )
- assert_matches_type(Agent, name, path=["response"])
-
- @pytest.mark.skip()
- @parametrize
- def test_raw_response_retrieve(self, client: Agentex) -> None:
- response = client.agents.name.with_raw_response.retrieve(
- "agent_name",
- )
-
- assert response.is_closed is True
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- name = response.parse()
- assert_matches_type(Agent, name, path=["response"])
-
- @pytest.mark.skip()
- @parametrize
- def test_streaming_response_retrieve(self, client: Agentex) -> None:
- with client.agents.name.with_streaming_response.retrieve(
- "agent_name",
- ) as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
-
- name = response.parse()
- assert_matches_type(Agent, name, path=["response"])
-
- assert cast(Any, response.is_closed) is True
-
- @pytest.mark.skip()
- @parametrize
- def test_path_params_retrieve(self, client: Agentex) -> None:
- with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_name` but received ''"):
- client.agents.name.with_raw_response.retrieve(
- "",
- )
-
- @pytest.mark.skip()
- @parametrize
- def test_method_delete(self, client: Agentex) -> None:
- name = client.agents.name.delete(
- "agent_name",
- )
- assert_matches_type(Agent, name, path=["response"])
-
- @pytest.mark.skip()
- @parametrize
- def test_raw_response_delete(self, client: Agentex) -> None:
- response = client.agents.name.with_raw_response.delete(
- "agent_name",
- )
-
- assert response.is_closed is True
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- name = response.parse()
- assert_matches_type(Agent, name, path=["response"])
-
- @pytest.mark.skip()
- @parametrize
- def test_streaming_response_delete(self, client: Agentex) -> None:
- with client.agents.name.with_streaming_response.delete(
- "agent_name",
- ) as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
-
- name = response.parse()
- assert_matches_type(Agent, name, path=["response"])
-
- assert cast(Any, response.is_closed) is True
-
- @pytest.mark.skip()
- @parametrize
- def test_path_params_delete(self, client: Agentex) -> None:
- with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_name` but received ''"):
- client.agents.name.with_raw_response.delete(
- "",
- )
-
- @pytest.mark.skip()
- @parametrize
- def test_method_rpc(self, client: Agentex) -> None:
- name = client.agents.name.rpc(
- agent_name="agent_name",
- method="event/send",
- params={},
- )
- assert_matches_type(object, name, path=["response"])
-
- @pytest.mark.skip()
- @parametrize
- def test_method_rpc_with_all_params(self, client: Agentex) -> None:
- name = client.agents.name.rpc(
- agent_name="agent_name",
- method="event/send",
- params={
- "name": "name",
- "params": {"foo": "bar"},
- },
- id=0,
- jsonrpc="2.0",
- )
- assert_matches_type(object, name, path=["response"])
-
- @pytest.mark.skip()
- @parametrize
- def test_raw_response_rpc(self, client: Agentex) -> None:
- response = client.agents.name.with_raw_response.rpc(
- agent_name="agent_name",
- method="event/send",
- params={},
- )
-
- assert response.is_closed is True
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- name = response.parse()
- assert_matches_type(object, name, path=["response"])
-
- @pytest.mark.skip()
- @parametrize
- def test_streaming_response_rpc(self, client: Agentex) -> None:
- with client.agents.name.with_streaming_response.rpc(
- agent_name="agent_name",
- method="event/send",
- params={},
- ) as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
-
- name = response.parse()
- assert_matches_type(object, name, path=["response"])
-
- assert cast(Any, response.is_closed) is True
-
- @pytest.mark.skip()
- @parametrize
- def test_path_params_rpc(self, client: Agentex) -> None:
- with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_name` but received ''"):
- client.agents.name.with_raw_response.rpc(
- agent_name="",
- method="event/send",
- params={},
- )
-
-
-class TestAsyncName:
- parametrize = pytest.mark.parametrize(
- "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
- )
-
- @pytest.mark.skip()
- @parametrize
- async def test_method_retrieve(self, async_client: AsyncAgentex) -> None:
- name = await async_client.agents.name.retrieve(
- "agent_name",
- )
- assert_matches_type(Agent, name, path=["response"])
-
- @pytest.mark.skip()
- @parametrize
- async def test_raw_response_retrieve(self, async_client: AsyncAgentex) -> None:
- response = await async_client.agents.name.with_raw_response.retrieve(
- "agent_name",
- )
-
- assert response.is_closed is True
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- name = await response.parse()
- assert_matches_type(Agent, name, path=["response"])
-
- @pytest.mark.skip()
- @parametrize
- async def test_streaming_response_retrieve(self, async_client: AsyncAgentex) -> None:
- async with async_client.agents.name.with_streaming_response.retrieve(
- "agent_name",
- ) as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
-
- name = await response.parse()
- assert_matches_type(Agent, name, path=["response"])
-
- assert cast(Any, response.is_closed) is True
-
- @pytest.mark.skip()
- @parametrize
- async def test_path_params_retrieve(self, async_client: AsyncAgentex) -> None:
- with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_name` but received ''"):
- await async_client.agents.name.with_raw_response.retrieve(
- "",
- )
-
- @pytest.mark.skip()
- @parametrize
- async def test_method_delete(self, async_client: AsyncAgentex) -> None:
- name = await async_client.agents.name.delete(
- "agent_name",
- )
- assert_matches_type(Agent, name, path=["response"])
-
- @pytest.mark.skip()
- @parametrize
- async def test_raw_response_delete(self, async_client: AsyncAgentex) -> None:
- response = await async_client.agents.name.with_raw_response.delete(
- "agent_name",
- )
-
- assert response.is_closed is True
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- name = await response.parse()
- assert_matches_type(Agent, name, path=["response"])
-
- @pytest.mark.skip()
- @parametrize
- async def test_streaming_response_delete(self, async_client: AsyncAgentex) -> None:
- async with async_client.agents.name.with_streaming_response.delete(
- "agent_name",
- ) as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
-
- name = await response.parse()
- assert_matches_type(Agent, name, path=["response"])
-
- assert cast(Any, response.is_closed) is True
-
- @pytest.mark.skip()
- @parametrize
- async def test_path_params_delete(self, async_client: AsyncAgentex) -> None:
- with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_name` but received ''"):
- await async_client.agents.name.with_raw_response.delete(
- "",
- )
-
- @pytest.mark.skip()
- @parametrize
- async def test_method_rpc(self, async_client: AsyncAgentex) -> None:
- name = await async_client.agents.name.rpc(
- agent_name="agent_name",
- method="event/send",
- params={},
- )
- assert_matches_type(object, name, path=["response"])
-
- @pytest.mark.skip()
- @parametrize
- async def test_method_rpc_with_all_params(self, async_client: AsyncAgentex) -> None:
- name = await async_client.agents.name.rpc(
- agent_name="agent_name",
- method="event/send",
- params={
- "name": "name",
- "params": {"foo": "bar"},
- },
- id=0,
- jsonrpc="2.0",
- )
- assert_matches_type(object, name, path=["response"])
-
- @pytest.mark.skip()
- @parametrize
- async def test_raw_response_rpc(self, async_client: AsyncAgentex) -> None:
- response = await async_client.agents.name.with_raw_response.rpc(
- agent_name="agent_name",
- method="event/send",
- params={},
- )
-
- assert response.is_closed is True
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- name = await response.parse()
- assert_matches_type(object, name, path=["response"])
-
- @pytest.mark.skip()
- @parametrize
- async def test_streaming_response_rpc(self, async_client: AsyncAgentex) -> None:
- async with async_client.agents.name.with_streaming_response.rpc(
- agent_name="agent_name",
- method="event/send",
- params={},
- ) as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
-
- name = await response.parse()
- assert_matches_type(object, name, path=["response"])
-
- assert cast(Any, response.is_closed) is True
-
- @pytest.mark.skip()
- @parametrize
- async def test_path_params_rpc(self, async_client: AsyncAgentex) -> None:
- with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_name` but received ''"):
- await async_client.agents.name.with_raw_response.rpc(
- agent_name="",
- method="event/send",
- params={},
- )
diff --git a/tests/api_resources/tasks/__init__.py b/tests/api_resources/tasks/__init__.py
deleted file mode 100644
index fd8019a9a..000000000
--- a/tests/api_resources/tasks/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
diff --git a/tests/api_resources/tasks/test_name.py b/tests/api_resources/tasks/test_name.py
deleted file mode 100644
index cb0006592..000000000
--- a/tests/api_resources/tasks/test_name.py
+++ /dev/null
@@ -1,274 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-import os
-from typing import Any, cast
-
-import pytest
-
-from agentex import Agentex, AsyncAgentex
-from tests.utils import assert_matches_type
-from agentex.types import Task
-
-base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
-
-
-class TestName:
- parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
-
- @pytest.mark.skip()
- @parametrize
- def test_method_retrieve(self, client: Agentex) -> None:
- name = client.tasks.name.retrieve(
- "task_name",
- )
- assert_matches_type(Task, name, path=["response"])
-
- @pytest.mark.skip()
- @parametrize
- def test_raw_response_retrieve(self, client: Agentex) -> None:
- response = client.tasks.name.with_raw_response.retrieve(
- "task_name",
- )
-
- assert response.is_closed is True
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- name = response.parse()
- assert_matches_type(Task, name, path=["response"])
-
- @pytest.mark.skip()
- @parametrize
- def test_streaming_response_retrieve(self, client: Agentex) -> None:
- with client.tasks.name.with_streaming_response.retrieve(
- "task_name",
- ) as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
-
- name = response.parse()
- assert_matches_type(Task, name, path=["response"])
-
- assert cast(Any, response.is_closed) is True
-
- @pytest.mark.skip()
- @parametrize
- def test_path_params_retrieve(self, client: Agentex) -> None:
- with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_name` but received ''"):
- client.tasks.name.with_raw_response.retrieve(
- "",
- )
-
- @pytest.mark.skip()
- @parametrize
- def test_method_delete(self, client: Agentex) -> None:
- name = client.tasks.name.delete(
- "task_name",
- )
- assert_matches_type(Task, name, path=["response"])
-
- @pytest.mark.skip()
- @parametrize
- def test_raw_response_delete(self, client: Agentex) -> None:
- response = client.tasks.name.with_raw_response.delete(
- "task_name",
- )
-
- assert response.is_closed is True
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- name = response.parse()
- assert_matches_type(Task, name, path=["response"])
-
- @pytest.mark.skip()
- @parametrize
- def test_streaming_response_delete(self, client: Agentex) -> None:
- with client.tasks.name.with_streaming_response.delete(
- "task_name",
- ) as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
-
- name = response.parse()
- assert_matches_type(Task, name, path=["response"])
-
- assert cast(Any, response.is_closed) is True
-
- @pytest.mark.skip()
- @parametrize
- def test_path_params_delete(self, client: Agentex) -> None:
- with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_name` but received ''"):
- client.tasks.name.with_raw_response.delete(
- "",
- )
-
- @pytest.mark.skip()
- @parametrize
- def test_method_stream_events(self, client: Agentex) -> None:
- name_stream = client.tasks.name.stream_events(
- "task_name",
- )
- name_stream.response.close()
-
- @pytest.mark.skip()
- @parametrize
- def test_raw_response_stream_events(self, client: Agentex) -> None:
- response = client.tasks.name.with_raw_response.stream_events(
- "task_name",
- )
-
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- stream = response.parse()
- stream.close()
-
- @pytest.mark.skip()
- @parametrize
- def test_streaming_response_stream_events(self, client: Agentex) -> None:
- with client.tasks.name.with_streaming_response.stream_events(
- "task_name",
- ) as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
-
- stream = response.parse()
- stream.close()
-
- assert cast(Any, response.is_closed) is True
-
- @pytest.mark.skip()
- @parametrize
- def test_path_params_stream_events(self, client: Agentex) -> None:
- with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_name` but received ''"):
- client.tasks.name.with_raw_response.stream_events(
- "",
- )
-
-
-class TestAsyncName:
- parametrize = pytest.mark.parametrize(
- "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
- )
-
- @pytest.mark.skip()
- @parametrize
- async def test_method_retrieve(self, async_client: AsyncAgentex) -> None:
- name = await async_client.tasks.name.retrieve(
- "task_name",
- )
- assert_matches_type(Task, name, path=["response"])
-
- @pytest.mark.skip()
- @parametrize
- async def test_raw_response_retrieve(self, async_client: AsyncAgentex) -> None:
- response = await async_client.tasks.name.with_raw_response.retrieve(
- "task_name",
- )
-
- assert response.is_closed is True
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- name = await response.parse()
- assert_matches_type(Task, name, path=["response"])
-
- @pytest.mark.skip()
- @parametrize
- async def test_streaming_response_retrieve(self, async_client: AsyncAgentex) -> None:
- async with async_client.tasks.name.with_streaming_response.retrieve(
- "task_name",
- ) as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
-
- name = await response.parse()
- assert_matches_type(Task, name, path=["response"])
-
- assert cast(Any, response.is_closed) is True
-
- @pytest.mark.skip()
- @parametrize
- async def test_path_params_retrieve(self, async_client: AsyncAgentex) -> None:
- with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_name` but received ''"):
- await async_client.tasks.name.with_raw_response.retrieve(
- "",
- )
-
- @pytest.mark.skip()
- @parametrize
- async def test_method_delete(self, async_client: AsyncAgentex) -> None:
- name = await async_client.tasks.name.delete(
- "task_name",
- )
- assert_matches_type(Task, name, path=["response"])
-
- @pytest.mark.skip()
- @parametrize
- async def test_raw_response_delete(self, async_client: AsyncAgentex) -> None:
- response = await async_client.tasks.name.with_raw_response.delete(
- "task_name",
- )
-
- assert response.is_closed is True
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- name = await response.parse()
- assert_matches_type(Task, name, path=["response"])
-
- @pytest.mark.skip()
- @parametrize
- async def test_streaming_response_delete(self, async_client: AsyncAgentex) -> None:
- async with async_client.tasks.name.with_streaming_response.delete(
- "task_name",
- ) as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
-
- name = await response.parse()
- assert_matches_type(Task, name, path=["response"])
-
- assert cast(Any, response.is_closed) is True
-
- @pytest.mark.skip()
- @parametrize
- async def test_path_params_delete(self, async_client: AsyncAgentex) -> None:
- with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_name` but received ''"):
- await async_client.tasks.name.with_raw_response.delete(
- "",
- )
-
- @pytest.mark.skip()
- @parametrize
- async def test_method_stream_events(self, async_client: AsyncAgentex) -> None:
- name_stream = await async_client.tasks.name.stream_events(
- "task_name",
- )
- await name_stream.response.aclose()
-
- @pytest.mark.skip()
- @parametrize
- async def test_raw_response_stream_events(self, async_client: AsyncAgentex) -> None:
- response = await async_client.tasks.name.with_raw_response.stream_events(
- "task_name",
- )
-
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- stream = await response.parse()
- await stream.close()
-
- @pytest.mark.skip()
- @parametrize
- async def test_streaming_response_stream_events(self, async_client: AsyncAgentex) -> None:
- async with async_client.tasks.name.with_streaming_response.stream_events(
- "task_name",
- ) as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
-
- stream = await response.parse()
- await stream.close()
-
- assert cast(Any, response.is_closed) is True
-
- @pytest.mark.skip()
- @parametrize
- async def test_path_params_stream_events(self, async_client: AsyncAgentex) -> None:
- with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_name` but received ''"):
- await async_client.tasks.name.with_raw_response.stream_events(
- "",
- )
diff --git a/tests/api_resources/test_agents.py b/tests/api_resources/test_agents.py
index de1055f79..7cb793750 100644
--- a/tests/api_resources/test_agents.py
+++ b/tests/api_resources/test_agents.py
@@ -9,7 +9,11 @@
from agentex import Agentex, AsyncAgentex
from tests.utils import assert_matches_type
-from agentex.types import Agent, AgentListResponse
+from agentex.types import (
+ Agent,
+ AgentRpcResponse,
+ AgentListResponse,
+)
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
@@ -137,6 +141,90 @@ def test_path_params_delete(self, client: Agentex) -> None:
"",
)
+ @pytest.mark.skip()
+ @parametrize
+ def test_method_delete_by_name(self, client: Agentex) -> None:
+ agent = client.agents.delete_by_name(
+ "agent_name",
+ )
+ assert_matches_type(Agent, agent, path=["response"])
+
+ @pytest.mark.skip()
+ @parametrize
+ def test_raw_response_delete_by_name(self, client: Agentex) -> None:
+ response = client.agents.with_raw_response.delete_by_name(
+ "agent_name",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ agent = response.parse()
+ assert_matches_type(Agent, agent, path=["response"])
+
+ @pytest.mark.skip()
+ @parametrize
+ def test_streaming_response_delete_by_name(self, client: Agentex) -> None:
+ with client.agents.with_streaming_response.delete_by_name(
+ "agent_name",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ agent = response.parse()
+ assert_matches_type(Agent, agent, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @pytest.mark.skip()
+ @parametrize
+ def test_path_params_delete_by_name(self, client: Agentex) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_name` but received ''"):
+ client.agents.with_raw_response.delete_by_name(
+ "",
+ )
+
+ @pytest.mark.skip()
+ @parametrize
+ def test_method_retrieve_by_name(self, client: Agentex) -> None:
+ agent = client.agents.retrieve_by_name(
+ "agent_name",
+ )
+ assert_matches_type(Agent, agent, path=["response"])
+
+ @pytest.mark.skip()
+ @parametrize
+ def test_raw_response_retrieve_by_name(self, client: Agentex) -> None:
+ response = client.agents.with_raw_response.retrieve_by_name(
+ "agent_name",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ agent = response.parse()
+ assert_matches_type(Agent, agent, path=["response"])
+
+ @pytest.mark.skip()
+ @parametrize
+ def test_streaming_response_retrieve_by_name(self, client: Agentex) -> None:
+ with client.agents.with_streaming_response.retrieve_by_name(
+ "agent_name",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ agent = response.parse()
+ assert_matches_type(Agent, agent, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @pytest.mark.skip()
+ @parametrize
+ def test_path_params_retrieve_by_name(self, client: Agentex) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_name` but received ''"):
+ client.agents.with_raw_response.retrieve_by_name(
+ "",
+ )
+
@pytest.mark.skip()
@parametrize
def test_method_rpc(self, client: Agentex) -> None:
@@ -145,7 +233,7 @@ def test_method_rpc(self, client: Agentex) -> None:
method="event/send",
params={},
)
- assert_matches_type(object, agent, path=["response"])
+ assert_matches_type(AgentRpcResponse, agent, path=["response"])
@pytest.mark.skip()
@parametrize
@@ -160,7 +248,7 @@ def test_method_rpc_with_all_params(self, client: Agentex) -> None:
id=0,
jsonrpc="2.0",
)
- assert_matches_type(object, agent, path=["response"])
+ assert_matches_type(AgentRpcResponse, agent, path=["response"])
@pytest.mark.skip()
@parametrize
@@ -174,7 +262,7 @@ def test_raw_response_rpc(self, client: Agentex) -> None:
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
agent = response.parse()
- assert_matches_type(object, agent, path=["response"])
+ assert_matches_type(AgentRpcResponse, agent, path=["response"])
@pytest.mark.skip()
@parametrize
@@ -188,7 +276,7 @@ def test_streaming_response_rpc(self, client: Agentex) -> None:
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
agent = response.parse()
- assert_matches_type(object, agent, path=["response"])
+ assert_matches_type(AgentRpcResponse, agent, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -202,6 +290,71 @@ def test_path_params_rpc(self, client: Agentex) -> None:
params={},
)
+ @pytest.mark.skip()
+ @parametrize
+ def test_method_rpc_by_name(self, client: Agentex) -> None:
+ agent = client.agents.rpc_by_name(
+ agent_name="agent_name",
+ method="event/send",
+ params={},
+ )
+ assert_matches_type(AgentRpcResponse, agent, path=["response"])
+
+ @pytest.mark.skip()
+ @parametrize
+ def test_method_rpc_by_name_with_all_params(self, client: Agentex) -> None:
+ agent = client.agents.rpc_by_name(
+ agent_name="agent_name",
+ method="event/send",
+ params={
+ "name": "name",
+ "params": {"foo": "bar"},
+ },
+ id=0,
+ jsonrpc="2.0",
+ )
+ assert_matches_type(AgentRpcResponse, agent, path=["response"])
+
+ @pytest.mark.skip()
+ @parametrize
+ def test_raw_response_rpc_by_name(self, client: Agentex) -> None:
+ response = client.agents.with_raw_response.rpc_by_name(
+ agent_name="agent_name",
+ method="event/send",
+ params={},
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ agent = response.parse()
+ assert_matches_type(AgentRpcResponse, agent, path=["response"])
+
+ @pytest.mark.skip()
+ @parametrize
+ def test_streaming_response_rpc_by_name(self, client: Agentex) -> None:
+ with client.agents.with_streaming_response.rpc_by_name(
+ agent_name="agent_name",
+ method="event/send",
+ params={},
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ agent = response.parse()
+ assert_matches_type(AgentRpcResponse, agent, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @pytest.mark.skip()
+ @parametrize
+ def test_path_params_rpc_by_name(self, client: Agentex) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_name` but received ''"):
+ client.agents.with_raw_response.rpc_by_name(
+ agent_name="",
+ method="event/send",
+ params={},
+ )
+
class TestAsyncAgents:
parametrize = pytest.mark.parametrize(
@@ -328,6 +481,90 @@ async def test_path_params_delete(self, async_client: AsyncAgentex) -> None:
"",
)
+ @pytest.mark.skip()
+ @parametrize
+ async def test_method_delete_by_name(self, async_client: AsyncAgentex) -> None:
+ agent = await async_client.agents.delete_by_name(
+ "agent_name",
+ )
+ assert_matches_type(Agent, agent, path=["response"])
+
+ @pytest.mark.skip()
+ @parametrize
+ async def test_raw_response_delete_by_name(self, async_client: AsyncAgentex) -> None:
+ response = await async_client.agents.with_raw_response.delete_by_name(
+ "agent_name",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ agent = await response.parse()
+ assert_matches_type(Agent, agent, path=["response"])
+
+ @pytest.mark.skip()
+ @parametrize
+ async def test_streaming_response_delete_by_name(self, async_client: AsyncAgentex) -> None:
+ async with async_client.agents.with_streaming_response.delete_by_name(
+ "agent_name",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ agent = await response.parse()
+ assert_matches_type(Agent, agent, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @pytest.mark.skip()
+ @parametrize
+ async def test_path_params_delete_by_name(self, async_client: AsyncAgentex) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_name` but received ''"):
+ await async_client.agents.with_raw_response.delete_by_name(
+ "",
+ )
+
+ @pytest.mark.skip()
+ @parametrize
+ async def test_method_retrieve_by_name(self, async_client: AsyncAgentex) -> None:
+ agent = await async_client.agents.retrieve_by_name(
+ "agent_name",
+ )
+ assert_matches_type(Agent, agent, path=["response"])
+
+ @pytest.mark.skip()
+ @parametrize
+ async def test_raw_response_retrieve_by_name(self, async_client: AsyncAgentex) -> None:
+ response = await async_client.agents.with_raw_response.retrieve_by_name(
+ "agent_name",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ agent = await response.parse()
+ assert_matches_type(Agent, agent, path=["response"])
+
+ @pytest.mark.skip()
+ @parametrize
+ async def test_streaming_response_retrieve_by_name(self, async_client: AsyncAgentex) -> None:
+ async with async_client.agents.with_streaming_response.retrieve_by_name(
+ "agent_name",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ agent = await response.parse()
+ assert_matches_type(Agent, agent, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @pytest.mark.skip()
+ @parametrize
+ async def test_path_params_retrieve_by_name(self, async_client: AsyncAgentex) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_name` but received ''"):
+ await async_client.agents.with_raw_response.retrieve_by_name(
+ "",
+ )
+
@pytest.mark.skip()
@parametrize
async def test_method_rpc(self, async_client: AsyncAgentex) -> None:
@@ -336,7 +573,7 @@ async def test_method_rpc(self, async_client: AsyncAgentex) -> None:
method="event/send",
params={},
)
- assert_matches_type(object, agent, path=["response"])
+ assert_matches_type(AgentRpcResponse, agent, path=["response"])
@pytest.mark.skip()
@parametrize
@@ -351,7 +588,7 @@ async def test_method_rpc_with_all_params(self, async_client: AsyncAgentex) -> N
id=0,
jsonrpc="2.0",
)
- assert_matches_type(object, agent, path=["response"])
+ assert_matches_type(AgentRpcResponse, agent, path=["response"])
@pytest.mark.skip()
@parametrize
@@ -365,7 +602,7 @@ async def test_raw_response_rpc(self, async_client: AsyncAgentex) -> None:
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
agent = await response.parse()
- assert_matches_type(object, agent, path=["response"])
+ assert_matches_type(AgentRpcResponse, agent, path=["response"])
@pytest.mark.skip()
@parametrize
@@ -379,7 +616,7 @@ async def test_streaming_response_rpc(self, async_client: AsyncAgentex) -> None:
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
agent = await response.parse()
- assert_matches_type(object, agent, path=["response"])
+ assert_matches_type(AgentRpcResponse, agent, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -392,3 +629,68 @@ async def test_path_params_rpc(self, async_client: AsyncAgentex) -> None:
method="event/send",
params={},
)
+
+ @pytest.mark.skip()
+ @parametrize
+ async def test_method_rpc_by_name(self, async_client: AsyncAgentex) -> None:
+ agent = await async_client.agents.rpc_by_name(
+ agent_name="agent_name",
+ method="event/send",
+ params={},
+ )
+ assert_matches_type(AgentRpcResponse, agent, path=["response"])
+
+ @pytest.mark.skip()
+ @parametrize
+ async def test_method_rpc_by_name_with_all_params(self, async_client: AsyncAgentex) -> None:
+ agent = await async_client.agents.rpc_by_name(
+ agent_name="agent_name",
+ method="event/send",
+ params={
+ "name": "name",
+ "params": {"foo": "bar"},
+ },
+ id=0,
+ jsonrpc="2.0",
+ )
+ assert_matches_type(AgentRpcResponse, agent, path=["response"])
+
+ @pytest.mark.skip()
+ @parametrize
+ async def test_raw_response_rpc_by_name(self, async_client: AsyncAgentex) -> None:
+ response = await async_client.agents.with_raw_response.rpc_by_name(
+ agent_name="agent_name",
+ method="event/send",
+ params={},
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ agent = await response.parse()
+ assert_matches_type(AgentRpcResponse, agent, path=["response"])
+
+ @pytest.mark.skip()
+ @parametrize
+ async def test_streaming_response_rpc_by_name(self, async_client: AsyncAgentex) -> None:
+ async with async_client.agents.with_streaming_response.rpc_by_name(
+ agent_name="agent_name",
+ method="event/send",
+ params={},
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ agent = await response.parse()
+ assert_matches_type(AgentRpcResponse, agent, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @pytest.mark.skip()
+ @parametrize
+ async def test_path_params_rpc_by_name(self, async_client: AsyncAgentex) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_name` but received ''"):
+ await async_client.agents.with_raw_response.rpc_by_name(
+ agent_name="",
+ method="event/send",
+ params={},
+ )
diff --git a/tests/api_resources/test_client.py b/tests/api_resources/test_client.py
deleted file mode 100644
index 82802d718..000000000
--- a/tests/api_resources/test_client.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-import os
-from typing import Any, cast
-
-import pytest
-
-from agentex import Agentex, AsyncAgentex
-from tests.utils import assert_matches_type
-
-base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
-
-
-class TestClient:
- parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
-
- @pytest.mark.skip()
- @parametrize
- def test_method_get_root(self, client: Agentex) -> None:
- client_ = client.get_root()
- assert_matches_type(object, client_, path=["response"])
-
- @pytest.mark.skip()
- @parametrize
- def test_raw_response_get_root(self, client: Agentex) -> None:
- response = client.with_raw_response.get_root()
-
- assert response.is_closed is True
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- client_ = response.parse()
- assert_matches_type(object, client_, path=["response"])
-
- @pytest.mark.skip()
- @parametrize
- def test_streaming_response_get_root(self, client: Agentex) -> None:
- with client.with_streaming_response.get_root() as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
-
- client_ = response.parse()
- assert_matches_type(object, client_, path=["response"])
-
- assert cast(Any, response.is_closed) is True
-
-
-class TestAsyncClient:
- parametrize = pytest.mark.parametrize(
- "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
- )
-
- @pytest.mark.skip()
- @parametrize
- async def test_method_get_root(self, async_client: AsyncAgentex) -> None:
- client = await async_client.get_root()
- assert_matches_type(object, client, path=["response"])
-
- @pytest.mark.skip()
- @parametrize
- async def test_raw_response_get_root(self, async_client: AsyncAgentex) -> None:
- response = await async_client.with_raw_response.get_root()
-
- assert response.is_closed is True
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- client = await response.parse()
- assert_matches_type(object, client, path=["response"])
-
- @pytest.mark.skip()
- @parametrize
- async def test_streaming_response_get_root(self, async_client: AsyncAgentex) -> None:
- async with async_client.with_streaming_response.get_root() as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
-
- client = await response.parse()
- assert_matches_type(object, client, path=["response"])
-
- assert cast(Any, response.is_closed) is True
diff --git a/tests/api_resources/test_echo.py b/tests/api_resources/test_echo.py
deleted file mode 100644
index 1a40d73f3..000000000
--- a/tests/api_resources/test_echo.py
+++ /dev/null
@@ -1,91 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-import os
-from typing import Any, cast
-
-import pytest
-
-from agentex import Agentex, AsyncAgentex
-from tests.utils import assert_matches_type
-
-base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
-
-
-class TestEcho:
- parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
-
- @pytest.mark.skip()
- @parametrize
- def test_method_send(self, client: Agentex) -> None:
- echo = client.echo.send(
- message="message",
- )
- assert_matches_type(object, echo, path=["response"])
-
- @pytest.mark.skip()
- @parametrize
- def test_raw_response_send(self, client: Agentex) -> None:
- response = client.echo.with_raw_response.send(
- message="message",
- )
-
- assert response.is_closed is True
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- echo = response.parse()
- assert_matches_type(object, echo, path=["response"])
-
- @pytest.mark.skip()
- @parametrize
- def test_streaming_response_send(self, client: Agentex) -> None:
- with client.echo.with_streaming_response.send(
- message="message",
- ) as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
-
- echo = response.parse()
- assert_matches_type(object, echo, path=["response"])
-
- assert cast(Any, response.is_closed) is True
-
-
-class TestAsyncEcho:
- parametrize = pytest.mark.parametrize(
- "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
- )
-
- @pytest.mark.skip()
- @parametrize
- async def test_method_send(self, async_client: AsyncAgentex) -> None:
- echo = await async_client.echo.send(
- message="message",
- )
- assert_matches_type(object, echo, path=["response"])
-
- @pytest.mark.skip()
- @parametrize
- async def test_raw_response_send(self, async_client: AsyncAgentex) -> None:
- response = await async_client.echo.with_raw_response.send(
- message="message",
- )
-
- assert response.is_closed is True
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- echo = await response.parse()
- assert_matches_type(object, echo, path=["response"])
-
- @pytest.mark.skip()
- @parametrize
- async def test_streaming_response_send(self, async_client: AsyncAgentex) -> None:
- async with async_client.echo.with_streaming_response.send(
- message="message",
- ) as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
-
- echo = await response.parse()
- assert_matches_type(object, echo, path=["response"])
-
- assert cast(Any, response.is_closed) is True
diff --git a/tests/api_resources/test_tasks.py b/tests/api_resources/test_tasks.py
index 060f051cf..0c9af1b4e 100644
--- a/tests/api_resources/test_tasks.py
+++ b/tests/api_resources/test_tasks.py
@@ -129,6 +129,90 @@ def test_path_params_delete(self, client: Agentex) -> None:
"",
)
+ @pytest.mark.skip()
+ @parametrize
+ def test_method_delete_by_name(self, client: Agentex) -> None:
+ task = client.tasks.delete_by_name(
+ "task_name",
+ )
+ assert_matches_type(Task, task, path=["response"])
+
+ @pytest.mark.skip()
+ @parametrize
+ def test_raw_response_delete_by_name(self, client: Agentex) -> None:
+ response = client.tasks.with_raw_response.delete_by_name(
+ "task_name",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ task = response.parse()
+ assert_matches_type(Task, task, path=["response"])
+
+ @pytest.mark.skip()
+ @parametrize
+ def test_streaming_response_delete_by_name(self, client: Agentex) -> None:
+ with client.tasks.with_streaming_response.delete_by_name(
+ "task_name",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ task = response.parse()
+ assert_matches_type(Task, task, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @pytest.mark.skip()
+ @parametrize
+ def test_path_params_delete_by_name(self, client: Agentex) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_name` but received ''"):
+ client.tasks.with_raw_response.delete_by_name(
+ "",
+ )
+
+ @pytest.mark.skip()
+ @parametrize
+ def test_method_retrieve_by_name(self, client: Agentex) -> None:
+ task = client.tasks.retrieve_by_name(
+ "task_name",
+ )
+ assert_matches_type(Task, task, path=["response"])
+
+ @pytest.mark.skip()
+ @parametrize
+ def test_raw_response_retrieve_by_name(self, client: Agentex) -> None:
+ response = client.tasks.with_raw_response.retrieve_by_name(
+ "task_name",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ task = response.parse()
+ assert_matches_type(Task, task, path=["response"])
+
+ @pytest.mark.skip()
+ @parametrize
+ def test_streaming_response_retrieve_by_name(self, client: Agentex) -> None:
+ with client.tasks.with_streaming_response.retrieve_by_name(
+ "task_name",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ task = response.parse()
+ assert_matches_type(Task, task, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @pytest.mark.skip()
+ @parametrize
+ def test_path_params_retrieve_by_name(self, client: Agentex) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_name` but received ''"):
+ client.tasks.with_raw_response.retrieve_by_name(
+ "",
+ )
+
@pytest.mark.skip()
@parametrize
def test_method_stream_events(self, client: Agentex) -> None:
@@ -170,6 +254,47 @@ def test_path_params_stream_events(self, client: Agentex) -> None:
"",
)
+ @pytest.mark.skip()
+ @parametrize
+ def test_method_stream_events_by_name(self, client: Agentex) -> None:
+ task_stream = client.tasks.stream_events_by_name(
+ "task_name",
+ )
+ task_stream.response.close()
+
+ @pytest.mark.skip()
+ @parametrize
+ def test_raw_response_stream_events_by_name(self, client: Agentex) -> None:
+ response = client.tasks.with_raw_response.stream_events_by_name(
+ "task_name",
+ )
+
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ stream = response.parse()
+ stream.close()
+
+ @pytest.mark.skip()
+ @parametrize
+ def test_streaming_response_stream_events_by_name(self, client: Agentex) -> None:
+ with client.tasks.with_streaming_response.stream_events_by_name(
+ "task_name",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ stream = response.parse()
+ stream.close()
+
+ assert cast(Any, response.is_closed) is True
+
+ @pytest.mark.skip()
+ @parametrize
+ def test_path_params_stream_events_by_name(self, client: Agentex) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_name` but received ''"):
+ client.tasks.with_raw_response.stream_events_by_name(
+ "",
+ )
+
class TestAsyncTasks:
parametrize = pytest.mark.parametrize(
@@ -288,6 +413,90 @@ async def test_path_params_delete(self, async_client: AsyncAgentex) -> None:
"",
)
+ @pytest.mark.skip()
+ @parametrize
+ async def test_method_delete_by_name(self, async_client: AsyncAgentex) -> None:
+ task = await async_client.tasks.delete_by_name(
+ "task_name",
+ )
+ assert_matches_type(Task, task, path=["response"])
+
+ @pytest.mark.skip()
+ @parametrize
+ async def test_raw_response_delete_by_name(self, async_client: AsyncAgentex) -> None:
+ response = await async_client.tasks.with_raw_response.delete_by_name(
+ "task_name",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ task = await response.parse()
+ assert_matches_type(Task, task, path=["response"])
+
+ @pytest.mark.skip()
+ @parametrize
+ async def test_streaming_response_delete_by_name(self, async_client: AsyncAgentex) -> None:
+ async with async_client.tasks.with_streaming_response.delete_by_name(
+ "task_name",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ task = await response.parse()
+ assert_matches_type(Task, task, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @pytest.mark.skip()
+ @parametrize
+ async def test_path_params_delete_by_name(self, async_client: AsyncAgentex) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_name` but received ''"):
+ await async_client.tasks.with_raw_response.delete_by_name(
+ "",
+ )
+
+ @pytest.mark.skip()
+ @parametrize
+ async def test_method_retrieve_by_name(self, async_client: AsyncAgentex) -> None:
+ task = await async_client.tasks.retrieve_by_name(
+ "task_name",
+ )
+ assert_matches_type(Task, task, path=["response"])
+
+ @pytest.mark.skip()
+ @parametrize
+ async def test_raw_response_retrieve_by_name(self, async_client: AsyncAgentex) -> None:
+ response = await async_client.tasks.with_raw_response.retrieve_by_name(
+ "task_name",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ task = await response.parse()
+ assert_matches_type(Task, task, path=["response"])
+
+ @pytest.mark.skip()
+ @parametrize
+ async def test_streaming_response_retrieve_by_name(self, async_client: AsyncAgentex) -> None:
+ async with async_client.tasks.with_streaming_response.retrieve_by_name(
+ "task_name",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ task = await response.parse()
+ assert_matches_type(Task, task, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @pytest.mark.skip()
+ @parametrize
+ async def test_path_params_retrieve_by_name(self, async_client: AsyncAgentex) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_name` but received ''"):
+ await async_client.tasks.with_raw_response.retrieve_by_name(
+ "",
+ )
+
@pytest.mark.skip()
@parametrize
async def test_method_stream_events(self, async_client: AsyncAgentex) -> None:
@@ -328,3 +537,44 @@ async def test_path_params_stream_events(self, async_client: AsyncAgentex) -> No
await async_client.tasks.with_raw_response.stream_events(
"",
)
+
+ @pytest.mark.skip()
+ @parametrize
+ async def test_method_stream_events_by_name(self, async_client: AsyncAgentex) -> None:
+ task_stream = await async_client.tasks.stream_events_by_name(
+ "task_name",
+ )
+ await task_stream.response.aclose()
+
+ @pytest.mark.skip()
+ @parametrize
+ async def test_raw_response_stream_events_by_name(self, async_client: AsyncAgentex) -> None:
+ response = await async_client.tasks.with_raw_response.stream_events_by_name(
+ "task_name",
+ )
+
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ stream = await response.parse()
+ await stream.close()
+
+ @pytest.mark.skip()
+ @parametrize
+ async def test_streaming_response_stream_events_by_name(self, async_client: AsyncAgentex) -> None:
+ async with async_client.tasks.with_streaming_response.stream_events_by_name(
+ "task_name",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ stream = await response.parse()
+ await stream.close()
+
+ assert cast(Any, response.is_closed) is True
+
+ @pytest.mark.skip()
+ @parametrize
+ async def test_path_params_stream_events_by_name(self, async_client: AsyncAgentex) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_name` but received ''"):
+ await async_client.tasks.with_raw_response.stream_events_by_name(
+ "",
+ )
diff --git a/tests/conftest.py b/tests/conftest.py
index d08e65cf6..b6e6697ea 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -45,8 +45,6 @@ def pytest_collection_modifyitems(items: list[pytest.Function]) -> None:
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
-api_key = "My API Key"
-
@pytest.fixture(scope="session")
def client(request: FixtureRequest) -> Iterator[Agentex]:
@@ -54,7 +52,7 @@ def client(request: FixtureRequest) -> Iterator[Agentex]:
if not isinstance(strict, bool):
raise TypeError(f"Unexpected fixture parameter type {type(strict)}, expected {bool}")
- with Agentex(base_url=base_url, api_key=api_key, _strict_response_validation=strict) as client:
+ with Agentex(base_url=base_url, _strict_response_validation=strict) as client:
yield client
@@ -78,7 +76,5 @@ async def async_client(request: FixtureRequest) -> AsyncIterator[AsyncAgentex]:
else:
raise TypeError(f"Unexpected fixture parameter type {type(param)}, expected bool or dict")
- async with AsyncAgentex(
- base_url=base_url, api_key=api_key, _strict_response_validation=strict, http_client=http_client
- ) as client:
+ async with AsyncAgentex(base_url=base_url, _strict_response_validation=strict, http_client=http_client) as client:
yield client
diff --git a/tests/test_client.py b/tests/test_client.py
index bb8ce7ed5..2fe22ed91 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -37,7 +37,6 @@
from .utils import update_env
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
-api_key = "My API Key"
def _get_params(client: BaseClient[Any, Any]) -> dict[str, str]:
@@ -59,7 +58,7 @@ def _get_open_connections(client: Agentex | AsyncAgentex) -> int:
class TestAgentex:
- client = Agentex(base_url=base_url, api_key=api_key, _strict_response_validation=True)
+ client = Agentex(base_url=base_url, _strict_response_validation=True)
@pytest.mark.respx(base_url=base_url)
def test_raw_response(self, respx_mock: MockRouter) -> None:
@@ -85,10 +84,6 @@ def test_copy(self) -> None:
copied = self.client.copy()
assert id(copied) != id(self.client)
- copied = self.client.copy(api_key="another My API Key")
- assert copied.api_key == "another My API Key"
- assert self.client.api_key == "My API Key"
-
def test_copy_default_options(self) -> None:
# options that have a default are overridden correctly
copied = self.client.copy(max_retries=7)
@@ -106,9 +101,7 @@ def test_copy_default_options(self) -> None:
assert isinstance(self.client.timeout, httpx.Timeout)
def test_copy_default_headers(self) -> None:
- client = Agentex(
- base_url=base_url, api_key=api_key, _strict_response_validation=True, default_headers={"X-Foo": "bar"}
- )
+ client = Agentex(base_url=base_url, _strict_response_validation=True, default_headers={"X-Foo": "bar"})
assert client.default_headers["X-Foo"] == "bar"
# does not override the already given value when not specified
@@ -140,9 +133,7 @@ def test_copy_default_headers(self) -> None:
client.copy(set_default_headers={}, default_headers={"X-Foo": "Bar"})
def test_copy_default_query(self) -> None:
- client = Agentex(
- base_url=base_url, api_key=api_key, _strict_response_validation=True, default_query={"foo": "bar"}
- )
+ client = Agentex(base_url=base_url, _strict_response_validation=True, default_query={"foo": "bar"})
assert _get_params(client)["foo"] == "bar"
# does not override the already given value when not specified
@@ -266,7 +257,7 @@ def test_request_timeout(self) -> None:
assert timeout == httpx.Timeout(100.0)
def test_client_timeout_option(self) -> None:
- client = Agentex(base_url=base_url, api_key=api_key, _strict_response_validation=True, timeout=httpx.Timeout(0))
+ client = Agentex(base_url=base_url, _strict_response_validation=True, timeout=httpx.Timeout(0))
request = client._build_request(FinalRequestOptions(method="get", url="/foo"))
timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore
@@ -275,9 +266,7 @@ def test_client_timeout_option(self) -> None:
def test_http_client_timeout_option(self) -> None:
# custom timeout given to the httpx client should be used
with httpx.Client(timeout=None) as http_client:
- client = Agentex(
- base_url=base_url, api_key=api_key, _strict_response_validation=True, http_client=http_client
- )
+ client = Agentex(base_url=base_url, _strict_response_validation=True, http_client=http_client)
request = client._build_request(FinalRequestOptions(method="get", url="/foo"))
timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore
@@ -285,9 +274,7 @@ def test_http_client_timeout_option(self) -> None:
# no timeout given to the httpx client should not use the httpx default
with httpx.Client() as http_client:
- client = Agentex(
- base_url=base_url, api_key=api_key, _strict_response_validation=True, http_client=http_client
- )
+ client = Agentex(base_url=base_url, _strict_response_validation=True, http_client=http_client)
request = client._build_request(FinalRequestOptions(method="get", url="/foo"))
timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore
@@ -295,9 +282,7 @@ def test_http_client_timeout_option(self) -> None:
# explicitly passing the default timeout currently results in it being ignored
with httpx.Client(timeout=HTTPX_DEFAULT_TIMEOUT) as http_client:
- client = Agentex(
- base_url=base_url, api_key=api_key, _strict_response_validation=True, http_client=http_client
- )
+ client = Agentex(base_url=base_url, _strict_response_validation=True, http_client=http_client)
request = client._build_request(FinalRequestOptions(method="get", url="/foo"))
timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore
@@ -306,24 +291,16 @@ def test_http_client_timeout_option(self) -> None:
async def test_invalid_http_client(self) -> None:
with pytest.raises(TypeError, match="Invalid `http_client` arg"):
async with httpx.AsyncClient() as http_client:
- Agentex(
- base_url=base_url,
- api_key=api_key,
- _strict_response_validation=True,
- http_client=cast(Any, http_client),
- )
+ Agentex(base_url=base_url, _strict_response_validation=True, http_client=cast(Any, http_client))
def test_default_headers_option(self) -> None:
- client = Agentex(
- base_url=base_url, api_key=api_key, _strict_response_validation=True, default_headers={"X-Foo": "bar"}
- )
+ client = Agentex(base_url=base_url, _strict_response_validation=True, default_headers={"X-Foo": "bar"})
request = client._build_request(FinalRequestOptions(method="get", url="/foo"))
assert request.headers.get("x-foo") == "bar"
assert request.headers.get("x-stainless-lang") == "python"
client2 = Agentex(
base_url=base_url,
- api_key=api_key,
_strict_response_validation=True,
default_headers={
"X-Foo": "stainless",
@@ -334,29 +311,8 @@ def test_default_headers_option(self) -> None:
assert request.headers.get("x-foo") == "stainless"
assert request.headers.get("x-stainless-lang") == "my-overriding-header"
- def test_validate_headers(self) -> None:
- client = Agentex(base_url=base_url, api_key=api_key, _strict_response_validation=True)
- request = client._build_request(FinalRequestOptions(method="get", url="/foo"))
- assert request.headers.get("Authorization") == f"Bearer {api_key}"
-
- with update_env(**{"AGENTEX_SDK_API_KEY": Omit()}):
- client2 = Agentex(base_url=base_url, api_key=None, _strict_response_validation=True)
-
- with pytest.raises(
- TypeError,
- match="Could not resolve authentication method. Expected the api_key to be set. Or for the `Authorization` headers to be explicitly omitted",
- ):
- client2._build_request(FinalRequestOptions(method="get", url="/foo"))
-
- request2 = client2._build_request(
- FinalRequestOptions(method="get", url="/foo", headers={"Authorization": Omit()})
- )
- assert request2.headers.get("Authorization") is None
-
def test_default_query_option(self) -> None:
- client = Agentex(
- base_url=base_url, api_key=api_key, _strict_response_validation=True, default_query={"query_param": "bar"}
- )
+ client = Agentex(base_url=base_url, _strict_response_validation=True, default_query={"query_param": "bar"})
request = client._build_request(FinalRequestOptions(method="get", url="/foo"))
url = httpx.URL(request.url)
assert dict(url.params) == {"query_param": "bar"}
@@ -555,7 +511,7 @@ class Model(BaseModel):
assert response.foo == 2
def test_base_url_setter(self) -> None:
- client = Agentex(base_url="https://example.com/from_init", api_key=api_key, _strict_response_validation=True)
+ client = Agentex(base_url="https://example.com/from_init", _strict_response_validation=True)
assert client.base_url == "https://example.com/from_init/"
client.base_url = "https://example.com/from_setter" # type: ignore[assignment]
@@ -564,16 +520,23 @@ def test_base_url_setter(self) -> None:
def test_base_url_env(self) -> None:
with update_env(AGENTEX_BASE_URL="http://localhost:5000/from/env"):
- client = Agentex(api_key=api_key, _strict_response_validation=True)
+ client = Agentex(_strict_response_validation=True)
assert client.base_url == "http://localhost:5000/from/env/"
+ # explicit environment arg requires explicitness
+ with update_env(AGENTEX_BASE_URL="http://localhost:5000/from/env"):
+ with pytest.raises(ValueError, match=r"you must pass base_url=None"):
+ Agentex(_strict_response_validation=True, environment="production")
+
+ client = Agentex(base_url=None, _strict_response_validation=True, environment="production")
+ assert str(client.base_url).startswith("http://localhost:5003")
+
@pytest.mark.parametrize(
"client",
[
- Agentex(base_url="http://localhost:5000/custom/path/", api_key=api_key, _strict_response_validation=True),
+ Agentex(base_url="http://localhost:5000/custom/path/", _strict_response_validation=True),
Agentex(
base_url="http://localhost:5000/custom/path/",
- api_key=api_key,
_strict_response_validation=True,
http_client=httpx.Client(),
),
@@ -593,10 +556,9 @@ def test_base_url_trailing_slash(self, client: Agentex) -> None:
@pytest.mark.parametrize(
"client",
[
- Agentex(base_url="http://localhost:5000/custom/path/", api_key=api_key, _strict_response_validation=True),
+ Agentex(base_url="http://localhost:5000/custom/path/", _strict_response_validation=True),
Agentex(
base_url="http://localhost:5000/custom/path/",
- api_key=api_key,
_strict_response_validation=True,
http_client=httpx.Client(),
),
@@ -616,10 +578,9 @@ def test_base_url_no_trailing_slash(self, client: Agentex) -> None:
@pytest.mark.parametrize(
"client",
[
- Agentex(base_url="http://localhost:5000/custom/path/", api_key=api_key, _strict_response_validation=True),
+ Agentex(base_url="http://localhost:5000/custom/path/", _strict_response_validation=True),
Agentex(
base_url="http://localhost:5000/custom/path/",
- api_key=api_key,
_strict_response_validation=True,
http_client=httpx.Client(),
),
@@ -637,7 +598,7 @@ def test_absolute_request_url(self, client: Agentex) -> None:
assert request.url == "https://myapi.com/foo"
def test_copied_client_does_not_close_http(self) -> None:
- client = Agentex(base_url=base_url, api_key=api_key, _strict_response_validation=True)
+ client = Agentex(base_url=base_url, _strict_response_validation=True)
assert not client.is_closed()
copied = client.copy()
@@ -648,7 +609,7 @@ def test_copied_client_does_not_close_http(self) -> None:
assert not client.is_closed()
def test_client_context_manager(self) -> None:
- client = Agentex(base_url=base_url, api_key=api_key, _strict_response_validation=True)
+ client = Agentex(base_url=base_url, _strict_response_validation=True)
with client as c2:
assert c2 is client
assert not c2.is_closed()
@@ -669,7 +630,7 @@ class Model(BaseModel):
def test_client_max_retries_validation(self) -> None:
with pytest.raises(TypeError, match=r"max_retries cannot be None"):
- Agentex(base_url=base_url, api_key=api_key, _strict_response_validation=True, max_retries=cast(Any, None))
+ Agentex(base_url=base_url, _strict_response_validation=True, max_retries=cast(Any, None))
@pytest.mark.respx(base_url=base_url)
def test_received_text_for_expected_json(self, respx_mock: MockRouter) -> None:
@@ -678,12 +639,12 @@ class Model(BaseModel):
respx_mock.get("/foo").mock(return_value=httpx.Response(200, text="my-custom-format"))
- strict_client = Agentex(base_url=base_url, api_key=api_key, _strict_response_validation=True)
+ strict_client = Agentex(base_url=base_url, _strict_response_validation=True)
with pytest.raises(APIResponseValidationError):
strict_client.get("/foo", cast_to=Model)
- client = Agentex(base_url=base_url, api_key=api_key, _strict_response_validation=False)
+ client = Agentex(base_url=base_url, _strict_response_validation=False)
response = client.get("/foo", cast_to=Model)
assert isinstance(response, str) # type: ignore[unreachable]
@@ -711,7 +672,7 @@ class Model(BaseModel):
)
@mock.patch("time.time", mock.MagicMock(return_value=1696004797))
def test_parse_retry_after_header(self, remaining_retries: int, retry_after: str, timeout: float) -> None:
- client = Agentex(base_url=base_url, api_key=api_key, _strict_response_validation=True)
+ client = Agentex(base_url=base_url, _strict_response_validation=True)
headers = httpx.Headers({"retry-after": retry_after})
options = FinalRequestOptions(method="get", url="/foo", max_retries=3)
@@ -721,20 +682,20 @@ def test_parse_retry_after_header(self, remaining_retries: int, retry_after: str
@mock.patch("agentex._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout)
@pytest.mark.respx(base_url=base_url)
def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter, client: Agentex) -> None:
- respx_mock.post("/echo").mock(side_effect=httpx.TimeoutException("Test timeout error"))
+ respx_mock.get("/agents/agent_id").mock(side_effect=httpx.TimeoutException("Test timeout error"))
with pytest.raises(APITimeoutError):
- client.echo.with_streaming_response.send(message="message").__enter__()
+ client.agents.with_streaming_response.retrieve("agent_id").__enter__()
assert _get_open_connections(self.client) == 0
@mock.patch("agentex._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout)
@pytest.mark.respx(base_url=base_url)
def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter, client: Agentex) -> None:
- respx_mock.post("/echo").mock(return_value=httpx.Response(500))
+ respx_mock.get("/agents/agent_id").mock(return_value=httpx.Response(500))
with pytest.raises(APIStatusError):
- client.echo.with_streaming_response.send(message="message").__enter__()
+ client.agents.with_streaming_response.retrieve("agent_id").__enter__()
assert _get_open_connections(self.client) == 0
@pytest.mark.parametrize("failures_before_success", [0, 2, 4])
@@ -761,9 +722,9 @@ def retry_handler(_request: httpx.Request) -> httpx.Response:
return httpx.Response(500)
return httpx.Response(200)
- respx_mock.post("/echo").mock(side_effect=retry_handler)
+ respx_mock.get("/agents/agent_id").mock(side_effect=retry_handler)
- response = client.echo.with_raw_response.send(message="message")
+ response = client.agents.with_raw_response.retrieve("agent_id")
assert response.retries_taken == failures_before_success
assert int(response.http_request.headers.get("x-stainless-retry-count")) == failures_before_success
@@ -785,10 +746,10 @@ def retry_handler(_request: httpx.Request) -> httpx.Response:
return httpx.Response(500)
return httpx.Response(200)
- respx_mock.post("/echo").mock(side_effect=retry_handler)
+ respx_mock.get("/agents/agent_id").mock(side_effect=retry_handler)
- response = client.echo.with_raw_response.send(
- message="message", extra_headers={"x-stainless-retry-count": Omit()}
+ response = client.agents.with_raw_response.retrieve(
+ "agent_id", extra_headers={"x-stainless-retry-count": Omit()}
)
assert len(response.http_request.headers.get_list("x-stainless-retry-count")) == 0
@@ -810,11 +771,9 @@ def retry_handler(_request: httpx.Request) -> httpx.Response:
return httpx.Response(500)
return httpx.Response(200)
- respx_mock.post("/echo").mock(side_effect=retry_handler)
+ respx_mock.get("/agents/agent_id").mock(side_effect=retry_handler)
- response = client.echo.with_raw_response.send(
- message="message", extra_headers={"x-stainless-retry-count": "42"}
- )
+ response = client.agents.with_raw_response.retrieve("agent_id", extra_headers={"x-stainless-retry-count": "42"})
assert response.http_request.headers.get("x-stainless-retry-count") == "42"
@@ -869,7 +828,7 @@ def test_follow_redirects_disabled(self, respx_mock: MockRouter) -> None:
class TestAsyncAgentex:
- client = AsyncAgentex(base_url=base_url, api_key=api_key, _strict_response_validation=True)
+ client = AsyncAgentex(base_url=base_url, _strict_response_validation=True)
@pytest.mark.respx(base_url=base_url)
@pytest.mark.asyncio
@@ -897,10 +856,6 @@ def test_copy(self) -> None:
copied = self.client.copy()
assert id(copied) != id(self.client)
- copied = self.client.copy(api_key="another My API Key")
- assert copied.api_key == "another My API Key"
- assert self.client.api_key == "My API Key"
-
def test_copy_default_options(self) -> None:
# options that have a default are overridden correctly
copied = self.client.copy(max_retries=7)
@@ -918,9 +873,7 @@ def test_copy_default_options(self) -> None:
assert isinstance(self.client.timeout, httpx.Timeout)
def test_copy_default_headers(self) -> None:
- client = AsyncAgentex(
- base_url=base_url, api_key=api_key, _strict_response_validation=True, default_headers={"X-Foo": "bar"}
- )
+ client = AsyncAgentex(base_url=base_url, _strict_response_validation=True, default_headers={"X-Foo": "bar"})
assert client.default_headers["X-Foo"] == "bar"
# does not override the already given value when not specified
@@ -952,9 +905,7 @@ def test_copy_default_headers(self) -> None:
client.copy(set_default_headers={}, default_headers={"X-Foo": "Bar"})
def test_copy_default_query(self) -> None:
- client = AsyncAgentex(
- base_url=base_url, api_key=api_key, _strict_response_validation=True, default_query={"foo": "bar"}
- )
+ client = AsyncAgentex(base_url=base_url, _strict_response_validation=True, default_query={"foo": "bar"})
assert _get_params(client)["foo"] == "bar"
# does not override the already given value when not specified
@@ -1078,9 +1029,7 @@ async def test_request_timeout(self) -> None:
assert timeout == httpx.Timeout(100.0)
async def test_client_timeout_option(self) -> None:
- client = AsyncAgentex(
- base_url=base_url, api_key=api_key, _strict_response_validation=True, timeout=httpx.Timeout(0)
- )
+ client = AsyncAgentex(base_url=base_url, _strict_response_validation=True, timeout=httpx.Timeout(0))
request = client._build_request(FinalRequestOptions(method="get", url="/foo"))
timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore
@@ -1089,9 +1038,7 @@ async def test_client_timeout_option(self) -> None:
async def test_http_client_timeout_option(self) -> None:
# custom timeout given to the httpx client should be used
async with httpx.AsyncClient(timeout=None) as http_client:
- client = AsyncAgentex(
- base_url=base_url, api_key=api_key, _strict_response_validation=True, http_client=http_client
- )
+ client = AsyncAgentex(base_url=base_url, _strict_response_validation=True, http_client=http_client)
request = client._build_request(FinalRequestOptions(method="get", url="/foo"))
timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore
@@ -1099,9 +1046,7 @@ async def test_http_client_timeout_option(self) -> None:
# no timeout given to the httpx client should not use the httpx default
async with httpx.AsyncClient() as http_client:
- client = AsyncAgentex(
- base_url=base_url, api_key=api_key, _strict_response_validation=True, http_client=http_client
- )
+ client = AsyncAgentex(base_url=base_url, _strict_response_validation=True, http_client=http_client)
request = client._build_request(FinalRequestOptions(method="get", url="/foo"))
timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore
@@ -1109,9 +1054,7 @@ async def test_http_client_timeout_option(self) -> None:
# explicitly passing the default timeout currently results in it being ignored
async with httpx.AsyncClient(timeout=HTTPX_DEFAULT_TIMEOUT) as http_client:
- client = AsyncAgentex(
- base_url=base_url, api_key=api_key, _strict_response_validation=True, http_client=http_client
- )
+ client = AsyncAgentex(base_url=base_url, _strict_response_validation=True, http_client=http_client)
request = client._build_request(FinalRequestOptions(method="get", url="/foo"))
timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore
@@ -1120,24 +1063,16 @@ async def test_http_client_timeout_option(self) -> None:
def test_invalid_http_client(self) -> None:
with pytest.raises(TypeError, match="Invalid `http_client` arg"):
with httpx.Client() as http_client:
- AsyncAgentex(
- base_url=base_url,
- api_key=api_key,
- _strict_response_validation=True,
- http_client=cast(Any, http_client),
- )
+ AsyncAgentex(base_url=base_url, _strict_response_validation=True, http_client=cast(Any, http_client))
def test_default_headers_option(self) -> None:
- client = AsyncAgentex(
- base_url=base_url, api_key=api_key, _strict_response_validation=True, default_headers={"X-Foo": "bar"}
- )
+ client = AsyncAgentex(base_url=base_url, _strict_response_validation=True, default_headers={"X-Foo": "bar"})
request = client._build_request(FinalRequestOptions(method="get", url="/foo"))
assert request.headers.get("x-foo") == "bar"
assert request.headers.get("x-stainless-lang") == "python"
client2 = AsyncAgentex(
base_url=base_url,
- api_key=api_key,
_strict_response_validation=True,
default_headers={
"X-Foo": "stainless",
@@ -1148,29 +1083,8 @@ def test_default_headers_option(self) -> None:
assert request.headers.get("x-foo") == "stainless"
assert request.headers.get("x-stainless-lang") == "my-overriding-header"
- def test_validate_headers(self) -> None:
- client = AsyncAgentex(base_url=base_url, api_key=api_key, _strict_response_validation=True)
- request = client._build_request(FinalRequestOptions(method="get", url="/foo"))
- assert request.headers.get("Authorization") == f"Bearer {api_key}"
-
- with update_env(**{"AGENTEX_SDK_API_KEY": Omit()}):
- client2 = AsyncAgentex(base_url=base_url, api_key=None, _strict_response_validation=True)
-
- with pytest.raises(
- TypeError,
- match="Could not resolve authentication method. Expected the api_key to be set. Or for the `Authorization` headers to be explicitly omitted",
- ):
- client2._build_request(FinalRequestOptions(method="get", url="/foo"))
-
- request2 = client2._build_request(
- FinalRequestOptions(method="get", url="/foo", headers={"Authorization": Omit()})
- )
- assert request2.headers.get("Authorization") is None
-
def test_default_query_option(self) -> None:
- client = AsyncAgentex(
- base_url=base_url, api_key=api_key, _strict_response_validation=True, default_query={"query_param": "bar"}
- )
+ client = AsyncAgentex(base_url=base_url, _strict_response_validation=True, default_query={"query_param": "bar"})
request = client._build_request(FinalRequestOptions(method="get", url="/foo"))
url = httpx.URL(request.url)
assert dict(url.params) == {"query_param": "bar"}
@@ -1369,9 +1283,7 @@ class Model(BaseModel):
assert response.foo == 2
def test_base_url_setter(self) -> None:
- client = AsyncAgentex(
- base_url="https://example.com/from_init", api_key=api_key, _strict_response_validation=True
- )
+ client = AsyncAgentex(base_url="https://example.com/from_init", _strict_response_validation=True)
assert client.base_url == "https://example.com/from_init/"
client.base_url = "https://example.com/from_setter" # type: ignore[assignment]
@@ -1380,18 +1292,23 @@ def test_base_url_setter(self) -> None:
def test_base_url_env(self) -> None:
with update_env(AGENTEX_BASE_URL="http://localhost:5000/from/env"):
- client = AsyncAgentex(api_key=api_key, _strict_response_validation=True)
+ client = AsyncAgentex(_strict_response_validation=True)
assert client.base_url == "http://localhost:5000/from/env/"
+ # explicit environment arg requires explicitness
+ with update_env(AGENTEX_BASE_URL="http://localhost:5000/from/env"):
+ with pytest.raises(ValueError, match=r"you must pass base_url=None"):
+ AsyncAgentex(_strict_response_validation=True, environment="production")
+
+ client = AsyncAgentex(base_url=None, _strict_response_validation=True, environment="production")
+ assert str(client.base_url).startswith("http://localhost:5003")
+
@pytest.mark.parametrize(
"client",
[
- AsyncAgentex(
- base_url="http://localhost:5000/custom/path/", api_key=api_key, _strict_response_validation=True
- ),
+ AsyncAgentex(base_url="http://localhost:5000/custom/path/", _strict_response_validation=True),
AsyncAgentex(
base_url="http://localhost:5000/custom/path/",
- api_key=api_key,
_strict_response_validation=True,
http_client=httpx.AsyncClient(),
),
@@ -1411,12 +1328,9 @@ def test_base_url_trailing_slash(self, client: AsyncAgentex) -> None:
@pytest.mark.parametrize(
"client",
[
- AsyncAgentex(
- base_url="http://localhost:5000/custom/path/", api_key=api_key, _strict_response_validation=True
- ),
+ AsyncAgentex(base_url="http://localhost:5000/custom/path/", _strict_response_validation=True),
AsyncAgentex(
base_url="http://localhost:5000/custom/path/",
- api_key=api_key,
_strict_response_validation=True,
http_client=httpx.AsyncClient(),
),
@@ -1436,12 +1350,9 @@ def test_base_url_no_trailing_slash(self, client: AsyncAgentex) -> None:
@pytest.mark.parametrize(
"client",
[
- AsyncAgentex(
- base_url="http://localhost:5000/custom/path/", api_key=api_key, _strict_response_validation=True
- ),
+ AsyncAgentex(base_url="http://localhost:5000/custom/path/", _strict_response_validation=True),
AsyncAgentex(
base_url="http://localhost:5000/custom/path/",
- api_key=api_key,
_strict_response_validation=True,
http_client=httpx.AsyncClient(),
),
@@ -1459,7 +1370,7 @@ def test_absolute_request_url(self, client: AsyncAgentex) -> None:
assert request.url == "https://myapi.com/foo"
async def test_copied_client_does_not_close_http(self) -> None:
- client = AsyncAgentex(base_url=base_url, api_key=api_key, _strict_response_validation=True)
+ client = AsyncAgentex(base_url=base_url, _strict_response_validation=True)
assert not client.is_closed()
copied = client.copy()
@@ -1471,7 +1382,7 @@ async def test_copied_client_does_not_close_http(self) -> None:
assert not client.is_closed()
async def test_client_context_manager(self) -> None:
- client = AsyncAgentex(base_url=base_url, api_key=api_key, _strict_response_validation=True)
+ client = AsyncAgentex(base_url=base_url, _strict_response_validation=True)
async with client as c2:
assert c2 is client
assert not c2.is_closed()
@@ -1493,9 +1404,7 @@ class Model(BaseModel):
async def test_client_max_retries_validation(self) -> None:
with pytest.raises(TypeError, match=r"max_retries cannot be None"):
- AsyncAgentex(
- base_url=base_url, api_key=api_key, _strict_response_validation=True, max_retries=cast(Any, None)
- )
+ AsyncAgentex(base_url=base_url, _strict_response_validation=True, max_retries=cast(Any, None))
@pytest.mark.respx(base_url=base_url)
@pytest.mark.asyncio
@@ -1505,12 +1414,12 @@ class Model(BaseModel):
respx_mock.get("/foo").mock(return_value=httpx.Response(200, text="my-custom-format"))
- strict_client = AsyncAgentex(base_url=base_url, api_key=api_key, _strict_response_validation=True)
+ strict_client = AsyncAgentex(base_url=base_url, _strict_response_validation=True)
with pytest.raises(APIResponseValidationError):
await strict_client.get("/foo", cast_to=Model)
- client = AsyncAgentex(base_url=base_url, api_key=api_key, _strict_response_validation=False)
+ client = AsyncAgentex(base_url=base_url, _strict_response_validation=False)
response = await client.get("/foo", cast_to=Model)
assert isinstance(response, str) # type: ignore[unreachable]
@@ -1539,7 +1448,7 @@ class Model(BaseModel):
@mock.patch("time.time", mock.MagicMock(return_value=1696004797))
@pytest.mark.asyncio
async def test_parse_retry_after_header(self, remaining_retries: int, retry_after: str, timeout: float) -> None:
- client = AsyncAgentex(base_url=base_url, api_key=api_key, _strict_response_validation=True)
+ client = AsyncAgentex(base_url=base_url, _strict_response_validation=True)
headers = httpx.Headers({"retry-after": retry_after})
options = FinalRequestOptions(method="get", url="/foo", max_retries=3)
@@ -1551,20 +1460,20 @@ async def test_parse_retry_after_header(self, remaining_retries: int, retry_afte
async def test_retrying_timeout_errors_doesnt_leak(
self, respx_mock: MockRouter, async_client: AsyncAgentex
) -> None:
- respx_mock.post("/echo").mock(side_effect=httpx.TimeoutException("Test timeout error"))
+ respx_mock.get("/agents/agent_id").mock(side_effect=httpx.TimeoutException("Test timeout error"))
with pytest.raises(APITimeoutError):
- await async_client.echo.with_streaming_response.send(message="message").__aenter__()
+ await async_client.agents.with_streaming_response.retrieve("agent_id").__aenter__()
assert _get_open_connections(self.client) == 0
@mock.patch("agentex._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout)
@pytest.mark.respx(base_url=base_url)
async def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter, async_client: AsyncAgentex) -> None:
- respx_mock.post("/echo").mock(return_value=httpx.Response(500))
+ respx_mock.get("/agents/agent_id").mock(return_value=httpx.Response(500))
with pytest.raises(APIStatusError):
- await async_client.echo.with_streaming_response.send(message="message").__aenter__()
+ await async_client.agents.with_streaming_response.retrieve("agent_id").__aenter__()
assert _get_open_connections(self.client) == 0
@pytest.mark.parametrize("failures_before_success", [0, 2, 4])
@@ -1592,9 +1501,9 @@ def retry_handler(_request: httpx.Request) -> httpx.Response:
return httpx.Response(500)
return httpx.Response(200)
- respx_mock.post("/echo").mock(side_effect=retry_handler)
+ respx_mock.get("/agents/agent_id").mock(side_effect=retry_handler)
- response = await client.echo.with_raw_response.send(message="message")
+ response = await client.agents.with_raw_response.retrieve("agent_id")
assert response.retries_taken == failures_before_success
assert int(response.http_request.headers.get("x-stainless-retry-count")) == failures_before_success
@@ -1617,10 +1526,10 @@ def retry_handler(_request: httpx.Request) -> httpx.Response:
return httpx.Response(500)
return httpx.Response(200)
- respx_mock.post("/echo").mock(side_effect=retry_handler)
+ respx_mock.get("/agents/agent_id").mock(side_effect=retry_handler)
- response = await client.echo.with_raw_response.send(
- message="message", extra_headers={"x-stainless-retry-count": Omit()}
+ response = await client.agents.with_raw_response.retrieve(
+ "agent_id", extra_headers={"x-stainless-retry-count": Omit()}
)
assert len(response.http_request.headers.get_list("x-stainless-retry-count")) == 0
@@ -1643,10 +1552,10 @@ def retry_handler(_request: httpx.Request) -> httpx.Response:
return httpx.Response(500)
return httpx.Response(200)
- respx_mock.post("/echo").mock(side_effect=retry_handler)
+ respx_mock.get("/agents/agent_id").mock(side_effect=retry_handler)
- response = await client.echo.with_raw_response.send(
- message="message", extra_headers={"x-stainless-retry-count": "42"}
+ response = await client.agents.with_raw_response.retrieve(
+ "agent_id", extra_headers={"x-stainless-retry-count": "42"}
)
assert response.http_request.headers.get("x-stainless-retry-count") == "42"
diff --git a/uv.lock b/uv.lock
new file mode 100644
index 000000000..25a6958b0
--- /dev/null
+++ b/uv.lock
@@ -0,0 +1,2149 @@
+version = 1
+revision = 1
+requires-python = ">=3.12, <4"
+
+[[package]]
+name = "agentex"
+version = "0.0.1a0"
+source = { editable = "." }
+dependencies = [
+ { name = "aiohttp" },
+ { name = "anyio" },
+ { name = "distro" },
+ { name = "fastapi" },
+ { name = "httpx" },
+ { name = "ipykernel" },
+ { name = "jinja2" },
+ { name = "jsonref" },
+ { name = "jsonschema" },
+ { name = "kubernetes" },
+ { name = "litellm" },
+ { name = "mcp", extra = ["cli"] },
+ { name = "openai-agents" },
+ { name = "pydantic" },
+ { name = "pytest" },
+ { name = "pytest-asyncio" },
+ { name = "python-on-whales" },
+ { name = "pyyaml" },
+ { name = "questionary" },
+ { name = "redis" },
+ { name = "rich" },
+ { name = "scale-gp" },
+ { name = "scale-gp-beta" },
+ { name = "sniffio" },
+ { name = "temporalio" },
+ { name = "typer" },
+ { name = "typing-extensions" },
+ { name = "tzdata" },
+ { name = "tzlocal" },
+ { name = "uvicorn" },
+]
+
+[package.optional-dependencies]
+aiohttp = [
+ { name = "aiohttp" },
+ { name = "httpx-aiohttp" },
+]
+dev = [
+ { name = "ruff" },
+]
+
+[package.metadata]
+requires-dist = [
+ { name = "aiohttp", specifier = ">=3.10.10,<4" },
+ { name = "aiohttp", marker = "extra == 'aiohttp'" },
+ { name = "anyio", specifier = ">=3.5.0,<5" },
+ { name = "distro", specifier = ">=1.7.0,<2" },
+ { name = "fastapi", specifier = ">=0.115.0,<0.116" },
+ { name = "httpx", specifier = ">=0.27.2,<0.28" },
+ { name = "httpx-aiohttp", marker = "extra == 'aiohttp'", specifier = ">=0.1.8" },
+ { name = "ipykernel", specifier = ">=6.29.5" },
+ { name = "jinja2", specifier = ">=3.1.3,<4" },
+ { name = "jsonref", specifier = ">=1.1.0,<2" },
+ { name = "jsonschema", specifier = ">=4.23.0,<5" },
+ { name = "kubernetes", specifier = ">=25.0.0,<29.0.0" },
+ { name = "litellm", specifier = ">=1.66.0,<2" },
+ { name = "mcp", extras = ["cli"], specifier = ">=1.4.1" },
+ { name = "openai-agents", specifier = ">=0.0.7" },
+ { name = "pydantic", specifier = ">=2.0.0,<3" },
+ { name = "pytest", specifier = ">=8.4.0" },
+ { name = "pytest-asyncio", specifier = ">=1.0.0" },
+ { name = "python-on-whales", specifier = ">=0.73.0,<0.74" },
+ { name = "pyyaml", specifier = ">=6.0.2,<7" },
+ { name = "questionary", specifier = ">=2.0.1,<3" },
+ { name = "redis", specifier = ">=5.2.0,<6" },
+ { name = "rich", specifier = ">=13.9.2,<14" },
+ { name = "ruff", marker = "extra == 'dev'", specifier = ">=0.3.4" },
+ { name = "scale-gp", specifier = ">=0.1.0a59" },
+ { name = "scale-gp-beta", specifier = "==0.1.0a20" },
+ { name = "sniffio" },
+ { name = "temporalio", specifier = ">=1.10.0,<2" },
+ { name = "typer", specifier = ">=0.16,<0.17" },
+ { name = "typing-extensions", specifier = ">=4.10,<5" },
+ { name = "tzdata", specifier = ">=2025.2" },
+ { name = "tzlocal", specifier = ">=5.3.1" },
+ { name = "uvicorn", specifier = ">=0.31.1" },
+]
+provides-extras = ["aiohttp", "dev"]
+
+[[package]]
+name = "aiohappyeyeballs"
+version = "2.6.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265 },
+]
+
+[[package]]
+name = "aiohttp"
+version = "3.12.14"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "aiohappyeyeballs" },
+ { name = "aiosignal" },
+ { name = "attrs" },
+ { name = "frozenlist" },
+ { name = "multidict" },
+ { name = "propcache" },
+ { name = "yarl" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/e6/0b/e39ad954107ebf213a2325038a3e7a506be3d98e1435e1f82086eec4cde2/aiohttp-3.12.14.tar.gz", hash = "sha256:6e06e120e34d93100de448fd941522e11dafa78ef1a893c179901b7d66aa29f2", size = 7822921 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c3/0d/29026524e9336e33d9767a1e593ae2b24c2b8b09af7c2bd8193762f76b3e/aiohttp-3.12.14-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a0ecbb32fc3e69bc25efcda7d28d38e987d007096cbbeed04f14a6662d0eee22", size = 701055 },
+ { url = "https://files.pythonhosted.org/packages/0a/b8/a5e8e583e6c8c1056f4b012b50a03c77a669c2e9bf012b7cf33d6bc4b141/aiohttp-3.12.14-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0400f0ca9bb3e0b02f6466421f253797f6384e9845820c8b05e976398ac1d81a", size = 475670 },
+ { url = "https://files.pythonhosted.org/packages/29/e8/5202890c9e81a4ec2c2808dd90ffe024952e72c061729e1d49917677952f/aiohttp-3.12.14-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a56809fed4c8a830b5cae18454b7464e1529dbf66f71c4772e3cfa9cbec0a1ff", size = 468513 },
+ { url = "https://files.pythonhosted.org/packages/23/e5/d11db8c23d8923d3484a27468a40737d50f05b05eebbb6288bafcb467356/aiohttp-3.12.14-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27f2e373276e4755691a963e5d11756d093e346119f0627c2d6518208483fb6d", size = 1715309 },
+ { url = "https://files.pythonhosted.org/packages/53/44/af6879ca0eff7a16b1b650b7ea4a827301737a350a464239e58aa7c387ef/aiohttp-3.12.14-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:ca39e433630e9a16281125ef57ece6817afd1d54c9f1bf32e901f38f16035869", size = 1697961 },
+ { url = "https://files.pythonhosted.org/packages/bb/94/18457f043399e1ec0e59ad8674c0372f925363059c276a45a1459e17f423/aiohttp-3.12.14-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c748b3f8b14c77720132b2510a7d9907a03c20ba80f469e58d5dfd90c079a1c", size = 1753055 },
+ { url = "https://files.pythonhosted.org/packages/26/d9/1d3744dc588fafb50ff8a6226d58f484a2242b5dd93d8038882f55474d41/aiohttp-3.12.14-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0a568abe1b15ce69d4cc37e23020720423f0728e3cb1f9bcd3f53420ec3bfe7", size = 1799211 },
+ { url = "https://files.pythonhosted.org/packages/73/12/2530fb2b08773f717ab2d249ca7a982ac66e32187c62d49e2c86c9bba9b4/aiohttp-3.12.14-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9888e60c2c54eaf56704b17feb558c7ed6b7439bca1e07d4818ab878f2083660", size = 1718649 },
+ { url = "https://files.pythonhosted.org/packages/b9/34/8d6015a729f6571341a311061b578e8b8072ea3656b3d72329fa0faa2c7c/aiohttp-3.12.14-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3006a1dc579b9156de01e7916d38c63dc1ea0679b14627a37edf6151bc530088", size = 1634452 },
+ { url = "https://files.pythonhosted.org/packages/ff/4b/08b83ea02595a582447aeb0c1986792d0de35fe7a22fb2125d65091cbaf3/aiohttp-3.12.14-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:aa8ec5c15ab80e5501a26719eb48a55f3c567da45c6ea5bb78c52c036b2655c7", size = 1695511 },
+ { url = "https://files.pythonhosted.org/packages/b5/66/9c7c31037a063eec13ecf1976185c65d1394ded4a5120dd5965e3473cb21/aiohttp-3.12.14-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:39b94e50959aa07844c7fe2206b9f75d63cc3ad1c648aaa755aa257f6f2498a9", size = 1716967 },
+ { url = "https://files.pythonhosted.org/packages/ba/02/84406e0ad1acb0fb61fd617651ab6de760b2d6a31700904bc0b33bd0894d/aiohttp-3.12.14-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:04c11907492f416dad9885d503fbfc5dcb6768d90cad8639a771922d584609d3", size = 1657620 },
+ { url = "https://files.pythonhosted.org/packages/07/53/da018f4013a7a179017b9a274b46b9a12cbeb387570f116964f498a6f211/aiohttp-3.12.14-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:88167bd9ab69bb46cee91bd9761db6dfd45b6e76a0438c7e884c3f8160ff21eb", size = 1737179 },
+ { url = "https://files.pythonhosted.org/packages/49/e8/ca01c5ccfeaafb026d85fa4f43ceb23eb80ea9c1385688db0ef322c751e9/aiohttp-3.12.14-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:791504763f25e8f9f251e4688195e8b455f8820274320204f7eafc467e609425", size = 1765156 },
+ { url = "https://files.pythonhosted.org/packages/22/32/5501ab525a47ba23c20613e568174d6c63aa09e2caa22cded5c6ea8e3ada/aiohttp-3.12.14-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2785b112346e435dd3a1a67f67713a3fe692d288542f1347ad255683f066d8e0", size = 1724766 },
+ { url = "https://files.pythonhosted.org/packages/06/af/28e24574801fcf1657945347ee10df3892311c2829b41232be6089e461e7/aiohttp-3.12.14-cp312-cp312-win32.whl", hash = "sha256:15f5f4792c9c999a31d8decf444e79fcfd98497bf98e94284bf390a7bb8c1729", size = 422641 },
+ { url = "https://files.pythonhosted.org/packages/98/d5/7ac2464aebd2eecac38dbe96148c9eb487679c512449ba5215d233755582/aiohttp-3.12.14-cp312-cp312-win_amd64.whl", hash = "sha256:3b66e1a182879f579b105a80d5c4bd448b91a57e8933564bf41665064796a338", size = 449316 },
+ { url = "https://files.pythonhosted.org/packages/06/48/e0d2fa8ac778008071e7b79b93ab31ef14ab88804d7ba71b5c964a7c844e/aiohttp-3.12.14-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:3143a7893d94dc82bc409f7308bc10d60285a3cd831a68faf1aa0836c5c3c767", size = 695471 },
+ { url = "https://files.pythonhosted.org/packages/8d/e7/f73206afa33100804f790b71092888f47df65fd9a4cd0e6800d7c6826441/aiohttp-3.12.14-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3d62ac3d506cef54b355bd34c2a7c230eb693880001dfcda0bf88b38f5d7af7e", size = 473128 },
+ { url = "https://files.pythonhosted.org/packages/df/e2/4dd00180be551a6e7ee979c20fc7c32727f4889ee3fd5b0586e0d47f30e1/aiohttp-3.12.14-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:48e43e075c6a438937c4de48ec30fa8ad8e6dfef122a038847456bfe7b947b63", size = 465426 },
+ { url = "https://files.pythonhosted.org/packages/de/dd/525ed198a0bb674a323e93e4d928443a680860802c44fa7922d39436b48b/aiohttp-3.12.14-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:077b4488411a9724cecc436cbc8c133e0d61e694995b8de51aaf351c7578949d", size = 1704252 },
+ { url = "https://files.pythonhosted.org/packages/d8/b1/01e542aed560a968f692ab4fc4323286e8bc4daae83348cd63588e4f33e3/aiohttp-3.12.14-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d8c35632575653f297dcbc9546305b2c1133391089ab925a6a3706dfa775ccab", size = 1685514 },
+ { url = "https://files.pythonhosted.org/packages/b3/06/93669694dc5fdabdc01338791e70452d60ce21ea0946a878715688d5a191/aiohttp-3.12.14-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b8ce87963f0035c6834b28f061df90cf525ff7c9b6283a8ac23acee6502afd4", size = 1737586 },
+ { url = "https://files.pythonhosted.org/packages/a5/3a/18991048ffc1407ca51efb49ba8bcc1645961f97f563a6c480cdf0286310/aiohttp-3.12.14-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0a2cf66e32a2563bb0766eb24eae7e9a269ac0dc48db0aae90b575dc9583026", size = 1786958 },
+ { url = "https://files.pythonhosted.org/packages/30/a8/81e237f89a32029f9b4a805af6dffc378f8459c7b9942712c809ff9e76e5/aiohttp-3.12.14-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdea089caf6d5cde975084a884c72d901e36ef9c2fd972c9f51efbbc64e96fbd", size = 1709287 },
+ { url = "https://files.pythonhosted.org/packages/8c/e3/bd67a11b0fe7fc12c6030473afd9e44223d456f500f7cf526dbaa259ae46/aiohttp-3.12.14-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a7865f27db67d49e81d463da64a59365ebd6b826e0e4847aa111056dcb9dc88", size = 1622990 },
+ { url = "https://files.pythonhosted.org/packages/83/ba/e0cc8e0f0d9ce0904e3cf2d6fa41904e379e718a013c721b781d53dcbcca/aiohttp-3.12.14-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0ab5b38a6a39781d77713ad930cb5e7feea6f253de656a5f9f281a8f5931b086", size = 1676015 },
+ { url = "https://files.pythonhosted.org/packages/d8/b3/1e6c960520bda094c48b56de29a3d978254637ace7168dd97ddc273d0d6c/aiohttp-3.12.14-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:9b3b15acee5c17e8848d90a4ebc27853f37077ba6aec4d8cb4dbbea56d156933", size = 1707678 },
+ { url = "https://files.pythonhosted.org/packages/0a/19/929a3eb8c35b7f9f076a462eaa9830b32c7f27d3395397665caa5e975614/aiohttp-3.12.14-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e4c972b0bdaac167c1e53e16a16101b17c6d0ed7eac178e653a07b9f7fad7151", size = 1650274 },
+ { url = "https://files.pythonhosted.org/packages/22/e5/81682a6f20dd1b18ce3d747de8eba11cbef9b270f567426ff7880b096b48/aiohttp-3.12.14-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7442488b0039257a3bdbc55f7209587911f143fca11df9869578db6c26feeeb8", size = 1726408 },
+ { url = "https://files.pythonhosted.org/packages/8c/17/884938dffaa4048302985483f77dfce5ac18339aad9b04ad4aaa5e32b028/aiohttp-3.12.14-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:f68d3067eecb64c5e9bab4a26aa11bd676f4c70eea9ef6536b0a4e490639add3", size = 1759879 },
+ { url = "https://files.pythonhosted.org/packages/95/78/53b081980f50b5cf874359bde707a6eacd6c4be3f5f5c93937e48c9d0025/aiohttp-3.12.14-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f88d3704c8b3d598a08ad17d06006cb1ca52a1182291f04979e305c8be6c9758", size = 1708770 },
+ { url = "https://files.pythonhosted.org/packages/ed/91/228eeddb008ecbe3ffa6c77b440597fdf640307162f0c6488e72c5a2d112/aiohttp-3.12.14-cp313-cp313-win32.whl", hash = "sha256:a3c99ab19c7bf375c4ae3debd91ca5d394b98b6089a03231d4c580ef3c2ae4c5", size = 421688 },
+ { url = "https://files.pythonhosted.org/packages/66/5f/8427618903343402fdafe2850738f735fd1d9409d2a8f9bcaae5e630d3ba/aiohttp-3.12.14-cp313-cp313-win_amd64.whl", hash = "sha256:3f8aad695e12edc9d571f878c62bedc91adf30c760c8632f09663e5f564f4baa", size = 448098 },
+]
+
+[[package]]
+name = "aiosignal"
+version = "1.4.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "frozenlist" },
+ { name = "typing-extensions", marker = "python_full_version < '3.13'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490 },
+]
+
+[[package]]
+name = "annotated-types"
+version = "0.7.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 },
+]
+
+[[package]]
+name = "anyio"
+version = "4.9.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "idna" },
+ { name = "sniffio" },
+ { name = "typing-extensions", marker = "python_full_version < '3.13'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916 },
+]
+
+[[package]]
+name = "appnope"
+version = "0.1.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/35/5d/752690df9ef5b76e169e68d6a129fa6d08a7100ca7f754c89495db3c6019/appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee", size = 4170 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/81/29/5ecc3a15d5a33e31b26c11426c45c501e439cb865d0bff96315d86443b78/appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c", size = 4321 },
+]
+
+[[package]]
+name = "asttokens"
+version = "3.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/4a/e7/82da0a03e7ba5141f05cce0d302e6eed121ae055e0456ca228bf693984bc/asttokens-3.0.0.tar.gz", hash = "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7", size = 61978 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/25/8a/c46dcc25341b5bce5472c718902eb3d38600a903b14fa6aeecef3f21a46f/asttokens-3.0.0-py3-none-any.whl", hash = "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2", size = 26918 },
+]
+
+[[package]]
+name = "attrs"
+version = "25.3.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815 },
+]
+
+[[package]]
+name = "cachetools"
+version = "5.5.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/6c/81/3747dad6b14fa2cf53fcf10548cf5aea6913e96fab41a3c198676f8948a5/cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4", size = 28380 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/72/76/20fa66124dbe6be5cafeb312ece67de6b61dd91a0247d1ea13db4ebb33c2/cachetools-5.5.2-py3-none-any.whl", hash = "sha256:d26a22bcc62eb95c3beabd9f1ee5e820d3d2704fe2967cbe350e20c8ffcd3f0a", size = 10080 },
+]
+
+[[package]]
+name = "certifi"
+version = "2025.7.14"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/b3/76/52c535bcebe74590f296d6c77c86dabf761c41980e1347a2422e4aa2ae41/certifi-2025.7.14.tar.gz", hash = "sha256:8ea99dbdfaaf2ba2f9bac77b9249ef62ec5218e7c2b2e903378ed5fccf765995", size = 163981 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/4f/52/34c6cf5bb9285074dc3531c437b3919e825d976fde097a7a73f79e726d03/certifi-2025.7.14-py3-none-any.whl", hash = "sha256:6b31f564a415d79ee77df69d757bb49a5bb53bd9f756cbbe24394ffd6fc1f4b2", size = 162722 },
+]
+
+[[package]]
+name = "cffi"
+version = "1.17.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pycparser" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178 },
+ { url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840 },
+ { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803 },
+ { url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850 },
+ { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729 },
+ { url = "https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256 },
+ { url = "https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424 },
+ { url = "https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568 },
+ { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736 },
+ { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448 },
+ { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976 },
+ { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989 },
+ { url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802 },
+ { url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792 },
+ { url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893 },
+ { url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810 },
+ { url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200 },
+ { url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447 },
+ { url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358 },
+ { url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469 },
+ { url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475 },
+ { url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009 },
+]
+
+[[package]]
+name = "charset-normalizer"
+version = "3.4.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/e4/33/89c2ced2b67d1c2a61c19c6751aa8902d46ce3dacb23600a283619f5a12d/charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", size = 126367 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d7/a4/37f4d6035c89cac7930395a35cc0f1b872e652eaafb76a6075943754f095/charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7", size = 199936 },
+ { url = "https://files.pythonhosted.org/packages/ee/8a/1a5e33b73e0d9287274f899d967907cd0bf9c343e651755d9307e0dbf2b3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3", size = 143790 },
+ { url = "https://files.pythonhosted.org/packages/66/52/59521f1d8e6ab1482164fa21409c5ef44da3e9f653c13ba71becdd98dec3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a", size = 153924 },
+ { url = "https://files.pythonhosted.org/packages/86/2d/fb55fdf41964ec782febbf33cb64be480a6b8f16ded2dbe8db27a405c09f/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214", size = 146626 },
+ { url = "https://files.pythonhosted.org/packages/8c/73/6ede2ec59bce19b3edf4209d70004253ec5f4e319f9a2e3f2f15601ed5f7/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a", size = 148567 },
+ { url = "https://files.pythonhosted.org/packages/09/14/957d03c6dc343c04904530b6bef4e5efae5ec7d7990a7cbb868e4595ee30/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd", size = 150957 },
+ { url = "https://files.pythonhosted.org/packages/0d/c8/8174d0e5c10ccebdcb1b53cc959591c4c722a3ad92461a273e86b9f5a302/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981", size = 145408 },
+ { url = "https://files.pythonhosted.org/packages/58/aa/8904b84bc8084ac19dc52feb4f5952c6df03ffb460a887b42615ee1382e8/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c", size = 153399 },
+ { url = "https://files.pythonhosted.org/packages/c2/26/89ee1f0e264d201cb65cf054aca6038c03b1a0c6b4ae998070392a3ce605/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b", size = 156815 },
+ { url = "https://files.pythonhosted.org/packages/fd/07/68e95b4b345bad3dbbd3a8681737b4338ff2c9df29856a6d6d23ac4c73cb/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d", size = 154537 },
+ { url = "https://files.pythonhosted.org/packages/77/1a/5eefc0ce04affb98af07bc05f3bac9094513c0e23b0562d64af46a06aae4/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f", size = 149565 },
+ { url = "https://files.pythonhosted.org/packages/37/a0/2410e5e6032a174c95e0806b1a6585eb21e12f445ebe239fac441995226a/charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c", size = 98357 },
+ { url = "https://files.pythonhosted.org/packages/6c/4f/c02d5c493967af3eda9c771ad4d2bbc8df6f99ddbeb37ceea6e8716a32bc/charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e", size = 105776 },
+ { url = "https://files.pythonhosted.org/packages/ea/12/a93df3366ed32db1d907d7593a94f1fe6293903e3e92967bebd6950ed12c/charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0", size = 199622 },
+ { url = "https://files.pythonhosted.org/packages/04/93/bf204e6f344c39d9937d3c13c8cd5bbfc266472e51fc8c07cb7f64fcd2de/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf", size = 143435 },
+ { url = "https://files.pythonhosted.org/packages/22/2a/ea8a2095b0bafa6c5b5a55ffdc2f924455233ee7b91c69b7edfcc9e02284/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e", size = 153653 },
+ { url = "https://files.pythonhosted.org/packages/b6/57/1b090ff183d13cef485dfbe272e2fe57622a76694061353c59da52c9a659/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1", size = 146231 },
+ { url = "https://files.pythonhosted.org/packages/e2/28/ffc026b26f441fc67bd21ab7f03b313ab3fe46714a14b516f931abe1a2d8/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c", size = 148243 },
+ { url = "https://files.pythonhosted.org/packages/c0/0f/9abe9bd191629c33e69e47c6ef45ef99773320e9ad8e9cb08b8ab4a8d4cb/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691", size = 150442 },
+ { url = "https://files.pythonhosted.org/packages/67/7c/a123bbcedca91d5916c056407f89a7f5e8fdfce12ba825d7d6b9954a1a3c/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0", size = 145147 },
+ { url = "https://files.pythonhosted.org/packages/ec/fe/1ac556fa4899d967b83e9893788e86b6af4d83e4726511eaaad035e36595/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b", size = 153057 },
+ { url = "https://files.pythonhosted.org/packages/2b/ff/acfc0b0a70b19e3e54febdd5301a98b72fa07635e56f24f60502e954c461/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff", size = 156454 },
+ { url = "https://files.pythonhosted.org/packages/92/08/95b458ce9c740d0645feb0e96cea1f5ec946ea9c580a94adfe0b617f3573/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b", size = 154174 },
+ { url = "https://files.pythonhosted.org/packages/78/be/8392efc43487ac051eee6c36d5fbd63032d78f7728cb37aebcc98191f1ff/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148", size = 149166 },
+ { url = "https://files.pythonhosted.org/packages/44/96/392abd49b094d30b91d9fbda6a69519e95802250b777841cf3bda8fe136c/charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7", size = 98064 },
+ { url = "https://files.pythonhosted.org/packages/e9/b0/0200da600134e001d91851ddc797809e2fe0ea72de90e09bec5a2fbdaccb/charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980", size = 105641 },
+ { url = "https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", size = 52626 },
+]
+
+[[package]]
+name = "click"
+version = "8.2.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "colorama", marker = "sys_platform == 'win32'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215 },
+]
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 },
+]
+
+[[package]]
+name = "comm"
+version = "0.2.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "traitlets" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/e9/a8/fb783cb0abe2b5fded9f55e5703015cdf1c9c85b3669087c538dd15a6a86/comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e", size = 6210 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e6/75/49e5bfe642f71f272236b5b2d2691cf915a7283cc0ceda56357b61daa538/comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3", size = 7180 },
+]
+
+[[package]]
+name = "debugpy"
+version = "1.8.15"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/8c/8b/3a9a28ddb750a76eaec445c7f4d3147ea2c579a97dbd9e25d39001b92b21/debugpy-1.8.15.tar.gz", hash = "sha256:58d7a20b7773ab5ee6bdfb2e6cf622fdf1e40c9d5aef2857d85391526719ac00", size = 1643279 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ab/4a/4508d256e52897f5cdfee6a6d7580974811e911c6d01321df3264508a5ac/debugpy-1.8.15-cp312-cp312-macosx_14_0_universal2.whl", hash = "sha256:3dcc7225cb317469721ab5136cda9ff9c8b6e6fb43e87c9e15d5b108b99d01ba", size = 2511197 },
+ { url = "https://files.pythonhosted.org/packages/99/8d/7f6ef1097e7fecf26b4ef72338d08e41644a41b7ee958a19f494ffcffc29/debugpy-1.8.15-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:047a493ca93c85ccede1dbbaf4e66816794bdc214213dde41a9a61e42d27f8fc", size = 4229517 },
+ { url = "https://files.pythonhosted.org/packages/3f/e8/e8c6a9aa33a9c9c6dacbf31747384f6ed2adde4de2e9693c766bdf323aa3/debugpy-1.8.15-cp312-cp312-win32.whl", hash = "sha256:b08e9b0bc260cf324c890626961dad4ffd973f7568fbf57feb3c3a65ab6b6327", size = 5276132 },
+ { url = "https://files.pythonhosted.org/packages/e9/ad/231050c6177b3476b85fcea01e565dac83607b5233d003ff067e2ee44d8f/debugpy-1.8.15-cp312-cp312-win_amd64.whl", hash = "sha256:e2a4fe357c92334272eb2845fcfcdbec3ef9f22c16cf613c388ac0887aed15fa", size = 5317645 },
+ { url = "https://files.pythonhosted.org/packages/28/70/2928aad2310726d5920b18ed9f54b9f06df5aa4c10cf9b45fa18ff0ab7e8/debugpy-1.8.15-cp313-cp313-macosx_14_0_universal2.whl", hash = "sha256:f5e01291ad7d6649aed5773256c5bba7a1a556196300232de1474c3c372592bf", size = 2495538 },
+ { url = "https://files.pythonhosted.org/packages/9e/c6/9b8ffb4ca91fac8b2877eef63c9cc0e87dd2570b1120054c272815ec4cd0/debugpy-1.8.15-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94dc0f0d00e528d915e0ce1c78e771475b2335b376c49afcc7382ee0b146bab6", size = 4221874 },
+ { url = "https://files.pythonhosted.org/packages/55/8a/9b8d59674b4bf489318c7c46a1aab58e606e583651438084b7e029bf3c43/debugpy-1.8.15-cp313-cp313-win32.whl", hash = "sha256:fcf0748d4f6e25f89dc5e013d1129ca6f26ad4da405e0723a4f704583896a709", size = 5275949 },
+ { url = "https://files.pythonhosted.org/packages/72/83/9e58e6fdfa8710a5e6ec06c2401241b9ad48b71c0a7eb99570a1f1edb1d3/debugpy-1.8.15-cp313-cp313-win_amd64.whl", hash = "sha256:73c943776cb83e36baf95e8f7f8da765896fd94b05991e7bc162456d25500683", size = 5317720 },
+ { url = "https://files.pythonhosted.org/packages/07/d5/98748d9860e767a1248b5e31ffa7ce8cb7006e97bf8abbf3d891d0a8ba4e/debugpy-1.8.15-py2.py3-none-any.whl", hash = "sha256:bce2e6c5ff4f2e00b98d45e7e01a49c7b489ff6df5f12d881c67d2f1ac635f3d", size = 5282697 },
+]
+
+[[package]]
+name = "decorator"
+version = "5.2.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/43/fa/6d96a0978d19e17b68d634497769987b16c8f4cd0a7a05048bec693caa6b/decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360", size = 56711 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190 },
+]
+
+[[package]]
+name = "distro"
+version = "1.9.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277 },
+]
+
+[[package]]
+name = "executing"
+version = "2.2.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/91/50/a9d80c47ff289c611ff12e63f7c5d13942c65d68125160cefd768c73e6e4/executing-2.2.0.tar.gz", hash = "sha256:5d108c028108fe2551d1a7b2e8b713341e2cb4fc0aa7dcf966fa4327a5226755", size = 978693 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7b/8f/c4d9bafc34ad7ad5d8dc16dd1347ee0e507a52c3adb6bfa8887e1c6a26ba/executing-2.2.0-py2.py3-none-any.whl", hash = "sha256:11387150cad388d62750327a53d3339fad4888b39a6fe233c3afbb54ecffd3aa", size = 26702 },
+]
+
+[[package]]
+name = "fastapi"
+version = "0.115.14"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pydantic" },
+ { name = "starlette" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ca/53/8c38a874844a8b0fa10dd8adf3836ac154082cf88d3f22b544e9ceea0a15/fastapi-0.115.14.tar.gz", hash = "sha256:b1de15cdc1c499a4da47914db35d0e4ef8f1ce62b624e94e0e5824421df99739", size = 296263 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/53/50/b1222562c6d270fea83e9c9075b8e8600b8479150a18e4516a6138b980d1/fastapi-0.115.14-py3-none-any.whl", hash = "sha256:6c0c8bf9420bd58f565e585036d971872472b4f7d3f6c73b698e10cffdefb3ca", size = 95514 },
+]
+
+[[package]]
+name = "filelock"
+version = "3.18.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/0a/10/c23352565a6544bdc5353e0b15fc1c563352101f30e24bf500207a54df9a/filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2", size = 18075 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/4d/36/2a115987e2d8c300a974597416d9de88f2444426de9571f4b59b2cca3acc/filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de", size = 16215 },
+]
+
+[[package]]
+name = "frozenlist"
+version = "1.7.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/79/b1/b64018016eeb087db503b038296fd782586432b9c077fc5c7839e9cb6ef6/frozenlist-1.7.0.tar.gz", hash = "sha256:2e310d81923c2437ea8670467121cc3e9b0f76d3043cc1d2331d56c7fb7a3a8f", size = 45078 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ef/a2/c8131383f1e66adad5f6ecfcce383d584ca94055a34d683bbb24ac5f2f1c/frozenlist-1.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3dbf9952c4bb0e90e98aec1bd992b3318685005702656bc6f67c1a32b76787f2", size = 81424 },
+ { url = "https://files.pythonhosted.org/packages/4c/9d/02754159955088cb52567337d1113f945b9e444c4960771ea90eb73de8db/frozenlist-1.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1f5906d3359300b8a9bb194239491122e6cf1444c2efb88865426f170c262cdb", size = 47952 },
+ { url = "https://files.pythonhosted.org/packages/01/7a/0046ef1bd6699b40acd2067ed6d6670b4db2f425c56980fa21c982c2a9db/frozenlist-1.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3dabd5a8f84573c8d10d8859a50ea2dec01eea372031929871368c09fa103478", size = 46688 },
+ { url = "https://files.pythonhosted.org/packages/d6/a2/a910bafe29c86997363fb4c02069df4ff0b5bc39d33c5198b4e9dd42d8f8/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa57daa5917f1738064f302bf2626281a1cb01920c32f711fbc7bc36111058a8", size = 243084 },
+ { url = "https://files.pythonhosted.org/packages/64/3e/5036af9d5031374c64c387469bfcc3af537fc0f5b1187d83a1cf6fab1639/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c193dda2b6d49f4c4398962810fa7d7c78f032bf45572b3e04dd5249dff27e08", size = 233524 },
+ { url = "https://files.pythonhosted.org/packages/06/39/6a17b7c107a2887e781a48ecf20ad20f1c39d94b2a548c83615b5b879f28/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe2b675cf0aaa6d61bf8fbffd3c274b3c9b7b1623beb3809df8a81399a4a9c4", size = 248493 },
+ { url = "https://files.pythonhosted.org/packages/be/00/711d1337c7327d88c44d91dd0f556a1c47fb99afc060ae0ef66b4d24793d/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8fc5d5cda37f62b262405cf9652cf0856839c4be8ee41be0afe8858f17f4c94b", size = 244116 },
+ { url = "https://files.pythonhosted.org/packages/24/fe/74e6ec0639c115df13d5850e75722750adabdc7de24e37e05a40527ca539/frozenlist-1.7.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0d5ce521d1dd7d620198829b87ea002956e4319002ef0bc8d3e6d045cb4646e", size = 224557 },
+ { url = "https://files.pythonhosted.org/packages/8d/db/48421f62a6f77c553575201e89048e97198046b793f4a089c79a6e3268bd/frozenlist-1.7.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:488d0a7d6a0008ca0db273c542098a0fa9e7dfaa7e57f70acef43f32b3f69dca", size = 241820 },
+ { url = "https://files.pythonhosted.org/packages/1d/fa/cb4a76bea23047c8462976ea7b7a2bf53997a0ca171302deae9d6dd12096/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:15a7eaba63983d22c54d255b854e8108e7e5f3e89f647fc854bd77a237e767df", size = 236542 },
+ { url = "https://files.pythonhosted.org/packages/5d/32/476a4b5cfaa0ec94d3f808f193301debff2ea42288a099afe60757ef6282/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1eaa7e9c6d15df825bf255649e05bd8a74b04a4d2baa1ae46d9c2d00b2ca2cb5", size = 249350 },
+ { url = "https://files.pythonhosted.org/packages/8d/ba/9a28042f84a6bf8ea5dbc81cfff8eaef18d78b2a1ad9d51c7bc5b029ad16/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4389e06714cfa9d47ab87f784a7c5be91d3934cd6e9a7b85beef808297cc025", size = 225093 },
+ { url = "https://files.pythonhosted.org/packages/bc/29/3a32959e68f9cf000b04e79ba574527c17e8842e38c91d68214a37455786/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:73bd45e1488c40b63fe5a7df892baf9e2a4d4bb6409a2b3b78ac1c6236178e01", size = 245482 },
+ { url = "https://files.pythonhosted.org/packages/80/e8/edf2f9e00da553f07f5fa165325cfc302dead715cab6ac8336a5f3d0adc2/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:99886d98e1643269760e5fe0df31e5ae7050788dd288947f7f007209b8c33f08", size = 249590 },
+ { url = "https://files.pythonhosted.org/packages/1c/80/9a0eb48b944050f94cc51ee1c413eb14a39543cc4f760ed12657a5a3c45a/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:290a172aae5a4c278c6da8a96222e6337744cd9c77313efe33d5670b9f65fc43", size = 237785 },
+ { url = "https://files.pythonhosted.org/packages/f3/74/87601e0fb0369b7a2baf404ea921769c53b7ae00dee7dcfe5162c8c6dbf0/frozenlist-1.7.0-cp312-cp312-win32.whl", hash = "sha256:426c7bc70e07cfebc178bc4c2bf2d861d720c4fff172181eeb4a4c41d4ca2ad3", size = 39487 },
+ { url = "https://files.pythonhosted.org/packages/0b/15/c026e9a9fc17585a9d461f65d8593d281fedf55fbf7eb53f16c6df2392f9/frozenlist-1.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:563b72efe5da92e02eb68c59cb37205457c977aa7a449ed1b37e6939e5c47c6a", size = 43874 },
+ { url = "https://files.pythonhosted.org/packages/24/90/6b2cebdabdbd50367273c20ff6b57a3dfa89bd0762de02c3a1eb42cb6462/frozenlist-1.7.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee80eeda5e2a4e660651370ebffd1286542b67e268aa1ac8d6dbe973120ef7ee", size = 79791 },
+ { url = "https://files.pythonhosted.org/packages/83/2e/5b70b6a3325363293fe5fc3ae74cdcbc3e996c2a11dde2fd9f1fb0776d19/frozenlist-1.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d1a81c85417b914139e3a9b995d4a1c84559afc839a93cf2cb7f15e6e5f6ed2d", size = 47165 },
+ { url = "https://files.pythonhosted.org/packages/f4/25/a0895c99270ca6966110f4ad98e87e5662eab416a17e7fd53c364bf8b954/frozenlist-1.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cbb65198a9132ebc334f237d7b0df163e4de83fb4f2bdfe46c1e654bdb0c5d43", size = 45881 },
+ { url = "https://files.pythonhosted.org/packages/19/7c/71bb0bbe0832793c601fff68cd0cf6143753d0c667f9aec93d3c323f4b55/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dab46c723eeb2c255a64f9dc05b8dd601fde66d6b19cdb82b2e09cc6ff8d8b5d", size = 232409 },
+ { url = "https://files.pythonhosted.org/packages/c0/45/ed2798718910fe6eb3ba574082aaceff4528e6323f9a8570be0f7028d8e9/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6aeac207a759d0dedd2e40745575ae32ab30926ff4fa49b1635def65806fddee", size = 225132 },
+ { url = "https://files.pythonhosted.org/packages/ba/e2/8417ae0f8eacb1d071d4950f32f229aa6bf68ab69aab797b72a07ea68d4f/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bd8c4e58ad14b4fa7802b8be49d47993182fdd4023393899632c88fd8cd994eb", size = 237638 },
+ { url = "https://files.pythonhosted.org/packages/f8/b7/2ace5450ce85f2af05a871b8c8719b341294775a0a6c5585d5e6170f2ce7/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04fb24d104f425da3540ed83cbfc31388a586a7696142004c577fa61c6298c3f", size = 233539 },
+ { url = "https://files.pythonhosted.org/packages/46/b9/6989292c5539553dba63f3c83dc4598186ab2888f67c0dc1d917e6887db6/frozenlist-1.7.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6a5c505156368e4ea6b53b5ac23c92d7edc864537ff911d2fb24c140bb175e60", size = 215646 },
+ { url = "https://files.pythonhosted.org/packages/72/31/bc8c5c99c7818293458fe745dab4fd5730ff49697ccc82b554eb69f16a24/frozenlist-1.7.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8bd7eb96a675f18aa5c553eb7ddc24a43c8c18f22e1f9925528128c052cdbe00", size = 232233 },
+ { url = "https://files.pythonhosted.org/packages/59/52/460db4d7ba0811b9ccb85af996019f5d70831f2f5f255f7cc61f86199795/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:05579bf020096fe05a764f1f84cd104a12f78eaab68842d036772dc6d4870b4b", size = 227996 },
+ { url = "https://files.pythonhosted.org/packages/ba/c9/f4b39e904c03927b7ecf891804fd3b4df3db29b9e487c6418e37988d6e9d/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:376b6222d114e97eeec13d46c486facd41d4f43bab626b7c3f6a8b4e81a5192c", size = 242280 },
+ { url = "https://files.pythonhosted.org/packages/b8/33/3f8d6ced42f162d743e3517781566b8481322be321b486d9d262adf70bfb/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0aa7e176ebe115379b5b1c95b4096fb1c17cce0847402e227e712c27bdb5a949", size = 217717 },
+ { url = "https://files.pythonhosted.org/packages/3e/e8/ad683e75da6ccef50d0ab0c2b2324b32f84fc88ceee778ed79b8e2d2fe2e/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3fbba20e662b9c2130dc771e332a99eff5da078b2b2648153a40669a6d0e36ca", size = 236644 },
+ { url = "https://files.pythonhosted.org/packages/b2/14/8d19ccdd3799310722195a72ac94ddc677541fb4bef4091d8e7775752360/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:f3f4410a0a601d349dd406b5713fec59b4cee7e71678d5b17edda7f4655a940b", size = 238879 },
+ { url = "https://files.pythonhosted.org/packages/ce/13/c12bf657494c2fd1079a48b2db49fa4196325909249a52d8f09bc9123fd7/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e2cdfaaec6a2f9327bf43c933c0319a7c429058e8537c508964a133dffee412e", size = 232502 },
+ { url = "https://files.pythonhosted.org/packages/d7/8b/e7f9dfde869825489382bc0d512c15e96d3964180c9499efcec72e85db7e/frozenlist-1.7.0-cp313-cp313-win32.whl", hash = "sha256:5fc4df05a6591c7768459caba1b342d9ec23fa16195e744939ba5914596ae3e1", size = 39169 },
+ { url = "https://files.pythonhosted.org/packages/35/89/a487a98d94205d85745080a37860ff5744b9820a2c9acbcdd9440bfddf98/frozenlist-1.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:52109052b9791a3e6b5d1b65f4b909703984b770694d3eb64fad124c835d7cba", size = 43219 },
+ { url = "https://files.pythonhosted.org/packages/56/d5/5c4cf2319a49eddd9dd7145e66c4866bdc6f3dbc67ca3d59685149c11e0d/frozenlist-1.7.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:a6f86e4193bb0e235ef6ce3dde5cbabed887e0b11f516ce8a0f4d3b33078ec2d", size = 84345 },
+ { url = "https://files.pythonhosted.org/packages/a4/7d/ec2c1e1dc16b85bc9d526009961953df9cec8481b6886debb36ec9107799/frozenlist-1.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:82d664628865abeb32d90ae497fb93df398a69bb3434463d172b80fc25b0dd7d", size = 48880 },
+ { url = "https://files.pythonhosted.org/packages/69/86/f9596807b03de126e11e7d42ac91e3d0b19a6599c714a1989a4e85eeefc4/frozenlist-1.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:912a7e8375a1c9a68325a902f3953191b7b292aa3c3fb0d71a216221deca460b", size = 48498 },
+ { url = "https://files.pythonhosted.org/packages/5e/cb/df6de220f5036001005f2d726b789b2c0b65f2363b104bbc16f5be8084f8/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9537c2777167488d539bc5de2ad262efc44388230e5118868e172dd4a552b146", size = 292296 },
+ { url = "https://files.pythonhosted.org/packages/83/1f/de84c642f17c8f851a2905cee2dae401e5e0daca9b5ef121e120e19aa825/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:f34560fb1b4c3e30ba35fa9a13894ba39e5acfc5f60f57d8accde65f46cc5e74", size = 273103 },
+ { url = "https://files.pythonhosted.org/packages/88/3c/c840bfa474ba3fa13c772b93070893c6e9d5c0350885760376cbe3b6c1b3/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:acd03d224b0175f5a850edc104ac19040d35419eddad04e7cf2d5986d98427f1", size = 292869 },
+ { url = "https://files.pythonhosted.org/packages/a6/1c/3efa6e7d5a39a1d5ef0abeb51c48fb657765794a46cf124e5aca2c7a592c/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2038310bc582f3d6a09b3816ab01737d60bf7b1ec70f5356b09e84fb7408ab1", size = 291467 },
+ { url = "https://files.pythonhosted.org/packages/4f/00/d5c5e09d4922c395e2f2f6b79b9a20dab4b67daaf78ab92e7729341f61f6/frozenlist-1.7.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8c05e4c8e5f36e5e088caa1bf78a687528f83c043706640a92cb76cd6999384", size = 266028 },
+ { url = "https://files.pythonhosted.org/packages/4e/27/72765be905619dfde25a7f33813ac0341eb6b076abede17a2e3fbfade0cb/frozenlist-1.7.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:765bb588c86e47d0b68f23c1bee323d4b703218037765dcf3f25c838c6fecceb", size = 284294 },
+ { url = "https://files.pythonhosted.org/packages/88/67/c94103a23001b17808eb7dd1200c156bb69fb68e63fcf0693dde4cd6228c/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:32dc2e08c67d86d0969714dd484fd60ff08ff81d1a1e40a77dd34a387e6ebc0c", size = 281898 },
+ { url = "https://files.pythonhosted.org/packages/42/34/a3e2c00c00f9e2a9db5653bca3fec306349e71aff14ae45ecc6d0951dd24/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:c0303e597eb5a5321b4de9c68e9845ac8f290d2ab3f3e2c864437d3c5a30cd65", size = 290465 },
+ { url = "https://files.pythonhosted.org/packages/bb/73/f89b7fbce8b0b0c095d82b008afd0590f71ccb3dee6eee41791cf8cd25fd/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:a47f2abb4e29b3a8d0b530f7c3598badc6b134562b1a5caee867f7c62fee51e3", size = 266385 },
+ { url = "https://files.pythonhosted.org/packages/cd/45/e365fdb554159462ca12df54bc59bfa7a9a273ecc21e99e72e597564d1ae/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:3d688126c242a6fabbd92e02633414d40f50bb6002fa4cf995a1d18051525657", size = 288771 },
+ { url = "https://files.pythonhosted.org/packages/00/11/47b6117002a0e904f004d70ec5194fe9144f117c33c851e3d51c765962d0/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:4e7e9652b3d367c7bd449a727dc79d5043f48b88d0cbfd4f9f1060cf2b414104", size = 288206 },
+ { url = "https://files.pythonhosted.org/packages/40/37/5f9f3c3fd7f7746082ec67bcdc204db72dad081f4f83a503d33220a92973/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:1a85e345b4c43db8b842cab1feb41be5cc0b10a1830e6295b69d7310f99becaf", size = 282620 },
+ { url = "https://files.pythonhosted.org/packages/0b/31/8fbc5af2d183bff20f21aa743b4088eac4445d2bb1cdece449ae80e4e2d1/frozenlist-1.7.0-cp313-cp313t-win32.whl", hash = "sha256:3a14027124ddb70dfcee5148979998066897e79f89f64b13328595c4bdf77c81", size = 43059 },
+ { url = "https://files.pythonhosted.org/packages/bb/ed/41956f52105b8dbc26e457c5705340c67c8cc2b79f394b79bffc09d0e938/frozenlist-1.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3bf8010d71d4507775f658e9823210b7427be36625b387221642725b515dcf3e", size = 47516 },
+ { url = "https://files.pythonhosted.org/packages/ee/45/b82e3c16be2182bff01179db177fe144d58b5dc787a7d4492c6ed8b9317f/frozenlist-1.7.0-py3-none-any.whl", hash = "sha256:9a5af342e34f7e97caf8c995864c7a396418ae2859cc6fdf1b1073020d516a7e", size = 13106 },
+]
+
+[[package]]
+name = "fsspec"
+version = "2025.7.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/8b/02/0835e6ab9cfc03916fe3f78c0956cfcdb6ff2669ffa6651065d5ebf7fc98/fsspec-2025.7.0.tar.gz", hash = "sha256:786120687ffa54b8283d942929540d8bc5ccfa820deb555a2b5d0ed2b737bf58", size = 304432 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/2f/e0/014d5d9d7a4564cf1c40b5039bc882db69fd881111e03ab3657ac0b218e2/fsspec-2025.7.0-py3-none-any.whl", hash = "sha256:8b012e39f63c7d5f10474de957f3ab793b47b45ae7d39f2fb735f8bbe25c0e21", size = 199597 },
+]
+
+[[package]]
+name = "google-auth"
+version = "2.40.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "cachetools" },
+ { name = "pyasn1-modules" },
+ { name = "rsa" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/9e/9b/e92ef23b84fa10a64ce4831390b7a4c2e53c0132568d99d4ae61d04c8855/google_auth-2.40.3.tar.gz", hash = "sha256:500c3a29adedeb36ea9cf24b8d10858e152f2412e3ca37829b3fa18e33d63b77", size = 281029 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/17/63/b19553b658a1692443c62bd07e5868adaa0ad746a0751ba62c59568cd45b/google_auth-2.40.3-py2.py3-none-any.whl", hash = "sha256:1370d4593e86213563547f97a92752fc658456fe4514c809544f330fed45a7ca", size = 216137 },
+]
+
+[[package]]
+name = "griffe"
+version = "1.7.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "colorama" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a9/3e/5aa9a61f7c3c47b0b52a1d930302992229d191bf4bc76447b324b731510a/griffe-1.7.3.tar.gz", hash = "sha256:52ee893c6a3a968b639ace8015bec9d36594961e156e23315c8e8e51401fa50b", size = 395137 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/58/c6/5c20af38c2a57c15d87f7f38bee77d63c1d2a3689f74fefaf35915dd12b2/griffe-1.7.3-py3-none-any.whl", hash = "sha256:c6b3ee30c2f0f17f30bcdef5068d6ab7a2a4f1b8bf1a3e74b56fffd21e1c5f75", size = 129303 },
+]
+
+[[package]]
+name = "h11"
+version = "0.16.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515 },
+]
+
+[[package]]
+name = "hf-xet"
+version = "1.1.5"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ed/d4/7685999e85945ed0d7f0762b686ae7015035390de1161dcea9d5276c134c/hf_xet-1.1.5.tar.gz", hash = "sha256:69ebbcfd9ec44fdc2af73441619eeb06b94ee34511bbcf57cd423820090f5694", size = 495969 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/00/89/a1119eebe2836cb25758e7661d6410d3eae982e2b5e974bcc4d250be9012/hf_xet-1.1.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:f52c2fa3635b8c37c7764d8796dfa72706cc4eded19d638331161e82b0792e23", size = 2687929 },
+ { url = "https://files.pythonhosted.org/packages/de/5f/2c78e28f309396e71ec8e4e9304a6483dcbc36172b5cea8f291994163425/hf_xet-1.1.5-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:9fa6e3ee5d61912c4a113e0708eaaef987047616465ac7aa30f7121a48fc1af8", size = 2556338 },
+ { url = "https://files.pythonhosted.org/packages/6d/2f/6cad7b5fe86b7652579346cb7f85156c11761df26435651cbba89376cd2c/hf_xet-1.1.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc874b5c843e642f45fd85cda1ce599e123308ad2901ead23d3510a47ff506d1", size = 3102894 },
+ { url = "https://files.pythonhosted.org/packages/d0/54/0fcf2b619720a26fbb6cc941e89f2472a522cd963a776c089b189559447f/hf_xet-1.1.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dbba1660e5d810bd0ea77c511a99e9242d920790d0e63c0e4673ed36c4022d18", size = 3002134 },
+ { url = "https://files.pythonhosted.org/packages/f3/92/1d351ac6cef7c4ba8c85744d37ffbfac2d53d0a6c04d2cabeba614640a78/hf_xet-1.1.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ab34c4c3104133c495785d5d8bba3b1efc99de52c02e759cf711a91fd39d3a14", size = 3171009 },
+ { url = "https://files.pythonhosted.org/packages/c9/65/4b2ddb0e3e983f2508528eb4501288ae2f84963586fbdfae596836d5e57a/hf_xet-1.1.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:83088ecea236d5113de478acb2339f92c95b4fb0462acaa30621fac02f5a534a", size = 3279245 },
+ { url = "https://files.pythonhosted.org/packages/f0/55/ef77a85ee443ae05a9e9cba1c9f0dd9241eb42da2aeba1dc50f51154c81a/hf_xet-1.1.5-cp37-abi3-win_amd64.whl", hash = "sha256:73e167d9807d166596b4b2f0b585c6d5bd84a26dea32843665a8b58f6edba245", size = 2738931 },
+]
+
+[[package]]
+name = "httpcore"
+version = "1.0.9"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "certifi" },
+ { name = "h11" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784 },
+]
+
+[[package]]
+name = "httpx"
+version = "0.27.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "anyio" },
+ { name = "certifi" },
+ { name = "httpcore" },
+ { name = "idna" },
+ { name = "sniffio" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/78/82/08f8c936781f67d9e6b9eeb8a0c8b4e406136ea4c3d1f89a5db71d42e0e6/httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2", size = 144189 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/56/95/9377bcb415797e44274b51d46e3249eba641711cf3348050f76ee7b15ffc/httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0", size = 76395 },
+]
+
+[[package]]
+name = "httpx-aiohttp"
+version = "0.1.8"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "aiohttp" },
+ { name = "httpx" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/37/19/ae2d2bf1f57fdd23c8ad83675599fb5c407fa13bc20e90f00cffa4dea3aa/httpx_aiohttp-0.1.8.tar.gz", hash = "sha256:756c5e74cdb568c3248ba63fe82bfe8bbe64b928728720f7eaac64b3cf46f308", size = 25401 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/54/7a/514c484b88cc4ebbcd2e27e92b86019c0c5bb920582f5fbb10b7e6c78574/httpx_aiohttp-0.1.8-py3-none-any.whl", hash = "sha256:b7bd958d1331f3759a38a0ba22ad29832cb63ca69498c17735228055bf78fa7e", size = 6180 },
+]
+
+[[package]]
+name = "httpx-sse"
+version = "0.4.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/6e/fa/66bd985dd0b7c109a3bcb89272ee0bfb7e2b4d06309ad7b38ff866734b2a/httpx_sse-0.4.1.tar.gz", hash = "sha256:8f44d34414bc7b21bf3602713005c5df4917884f76072479b21f68befa4ea26e", size = 12998 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/25/0a/6269e3473b09aed2dab8aa1a600c70f31f00ae1349bee30658f7e358a159/httpx_sse-0.4.1-py3-none-any.whl", hash = "sha256:cba42174344c3a5b06f255ce65b350880f962d99ead85e776f23c6618a377a37", size = 8054 },
+]
+
+[[package]]
+name = "huggingface-hub"
+version = "0.33.4"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "filelock" },
+ { name = "fsspec" },
+ { name = "hf-xet", marker = "platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'arm64' or platform_machine == 'x86_64'" },
+ { name = "packaging" },
+ { name = "pyyaml" },
+ { name = "requests" },
+ { name = "tqdm" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/4b/9e/9366b7349fc125dd68b9d384a0fea84d67b7497753fe92c71b67e13f47c4/huggingface_hub-0.33.4.tar.gz", hash = "sha256:6af13478deae120e765bfd92adad0ae1aec1ad8c439b46f23058ad5956cbca0a", size = 426674 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/46/7b/98daa50a2db034cab6cd23a3de04fa2358cb691593d28e9130203eb7a805/huggingface_hub-0.33.4-py3-none-any.whl", hash = "sha256:09f9f4e7ca62547c70f8b82767eefadd2667f4e116acba2e3e62a5a81815a7bb", size = 515339 },
+]
+
+[[package]]
+name = "idna"
+version = "3.10"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 },
+]
+
+[[package]]
+name = "importlib-metadata"
+version = "8.7.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "zipp" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000", size = 56641 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd", size = 27656 },
+]
+
+[[package]]
+name = "iniconfig"
+version = "2.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050 },
+]
+
+[[package]]
+name = "ipykernel"
+version = "6.29.5"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "appnope", marker = "sys_platform == 'darwin'" },
+ { name = "comm" },
+ { name = "debugpy" },
+ { name = "ipython" },
+ { name = "jupyter-client" },
+ { name = "jupyter-core" },
+ { name = "matplotlib-inline" },
+ { name = "nest-asyncio" },
+ { name = "packaging" },
+ { name = "psutil" },
+ { name = "pyzmq" },
+ { name = "tornado" },
+ { name = "traitlets" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/e9/5c/67594cb0c7055dc50814b21731c22a601101ea3b1b50a9a1b090e11f5d0f/ipykernel-6.29.5.tar.gz", hash = "sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215", size = 163367 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/94/5c/368ae6c01c7628438358e6d337c19b05425727fbb221d2a3c4303c372f42/ipykernel-6.29.5-py3-none-any.whl", hash = "sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5", size = 117173 },
+]
+
+[[package]]
+name = "ipython"
+version = "9.4.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "colorama", marker = "sys_platform == 'win32'" },
+ { name = "decorator" },
+ { name = "ipython-pygments-lexers" },
+ { name = "jedi" },
+ { name = "matplotlib-inline" },
+ { name = "pexpect", marker = "sys_platform != 'emscripten' and sys_platform != 'win32'" },
+ { name = "prompt-toolkit" },
+ { name = "pygments" },
+ { name = "stack-data" },
+ { name = "traitlets" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/54/80/406f9e3bde1c1fd9bf5a0be9d090f8ae623e401b7670d8f6fdf2ab679891/ipython-9.4.0.tar.gz", hash = "sha256:c033c6d4e7914c3d9768aabe76bbe87ba1dc66a92a05db6bfa1125d81f2ee270", size = 4385338 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/63/f8/0031ee2b906a15a33d6bfc12dd09c3dfa966b3cb5b284ecfb7549e6ac3c4/ipython-9.4.0-py3-none-any.whl", hash = "sha256:25850f025a446d9b359e8d296ba175a36aedd32e83ca9b5060430fe16801f066", size = 611021 },
+]
+
+[[package]]
+name = "ipython-pygments-lexers"
+version = "1.1.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pygments" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ef/4c/5dd1d8af08107f88c7f741ead7a40854b8ac24ddf9ae850afbcf698aa552/ipython_pygments_lexers-1.1.1.tar.gz", hash = "sha256:09c0138009e56b6854f9535736f4171d855c8c08a563a0dcd8022f78355c7e81", size = 8393 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d9/33/1f075bf72b0b747cb3288d011319aaf64083cf2efef8354174e3ed4540e2/ipython_pygments_lexers-1.1.1-py3-none-any.whl", hash = "sha256:a9462224a505ade19a605f71f8fa63c2048833ce50abc86768a0d81d876dc81c", size = 8074 },
+]
+
+[[package]]
+name = "jedi"
+version = "0.19.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "parso" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/72/3a/79a912fbd4d8dd6fbb02bf69afd3bb72cf0c729bb3063c6f4498603db17a/jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0", size = 1231287 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c0/5a/9cac0c82afec3d09ccd97c8b6502d48f165f9124db81b4bcb90b4af974ee/jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9", size = 1572278 },
+]
+
+[[package]]
+name = "jinja2"
+version = "3.1.6"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "markupsafe" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899 },
+]
+
+[[package]]
+name = "jiter"
+version = "0.10.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ee/9d/ae7ddb4b8ab3fb1b51faf4deb36cb48a4fbbd7cb36bad6a5fca4741306f7/jiter-0.10.0.tar.gz", hash = "sha256:07a7142c38aacc85194391108dc91b5b57093c978a9932bd86a36862759d9500", size = 162759 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/6d/b5/348b3313c58f5fbfb2194eb4d07e46a35748ba6e5b3b3046143f3040bafa/jiter-0.10.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1e274728e4a5345a6dde2d343c8da018b9d4bd4350f5a472fa91f66fda44911b", size = 312262 },
+ { url = "https://files.pythonhosted.org/packages/9c/4a/6a2397096162b21645162825f058d1709a02965606e537e3304b02742e9b/jiter-0.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7202ae396446c988cb2a5feb33a543ab2165b786ac97f53b59aafb803fef0744", size = 320124 },
+ { url = "https://files.pythonhosted.org/packages/2a/85/1ce02cade7516b726dd88f59a4ee46914bf79d1676d1228ef2002ed2f1c9/jiter-0.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23ba7722d6748b6920ed02a8f1726fb4b33e0fd2f3f621816a8b486c66410ab2", size = 345330 },
+ { url = "https://files.pythonhosted.org/packages/75/d0/bb6b4f209a77190ce10ea8d7e50bf3725fc16d3372d0a9f11985a2b23eff/jiter-0.10.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:371eab43c0a288537d30e1f0b193bc4eca90439fc08a022dd83e5e07500ed026", size = 369670 },
+ { url = "https://files.pythonhosted.org/packages/a0/f5/a61787da9b8847a601e6827fbc42ecb12be2c925ced3252c8ffcb56afcaf/jiter-0.10.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c675736059020365cebc845a820214765162728b51ab1e03a1b7b3abb70f74c", size = 489057 },
+ { url = "https://files.pythonhosted.org/packages/12/e4/6f906272810a7b21406c760a53aadbe52e99ee070fc5c0cb191e316de30b/jiter-0.10.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0c5867d40ab716e4684858e4887489685968a47e3ba222e44cde6e4a2154f959", size = 389372 },
+ { url = "https://files.pythonhosted.org/packages/e2/ba/77013b0b8ba904bf3762f11e0129b8928bff7f978a81838dfcc958ad5728/jiter-0.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:395bb9a26111b60141757d874d27fdea01b17e8fac958b91c20128ba8f4acc8a", size = 352038 },
+ { url = "https://files.pythonhosted.org/packages/67/27/c62568e3ccb03368dbcc44a1ef3a423cb86778a4389e995125d3d1aaa0a4/jiter-0.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6842184aed5cdb07e0c7e20e5bdcfafe33515ee1741a6835353bb45fe5d1bd95", size = 391538 },
+ { url = "https://files.pythonhosted.org/packages/c0/72/0d6b7e31fc17a8fdce76164884edef0698ba556b8eb0af9546ae1a06b91d/jiter-0.10.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:62755d1bcea9876770d4df713d82606c8c1a3dca88ff39046b85a048566d56ea", size = 523557 },
+ { url = "https://files.pythonhosted.org/packages/2f/09/bc1661fbbcbeb6244bd2904ff3a06f340aa77a2b94e5a7373fd165960ea3/jiter-0.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:533efbce2cacec78d5ba73a41756beff8431dfa1694b6346ce7af3a12c42202b", size = 514202 },
+ { url = "https://files.pythonhosted.org/packages/1b/84/5a5d5400e9d4d54b8004c9673bbe4403928a00d28529ff35b19e9d176b19/jiter-0.10.0-cp312-cp312-win32.whl", hash = "sha256:8be921f0cadd245e981b964dfbcd6fd4bc4e254cdc069490416dd7a2632ecc01", size = 211781 },
+ { url = "https://files.pythonhosted.org/packages/9b/52/7ec47455e26f2d6e5f2ea4951a0652c06e5b995c291f723973ae9e724a65/jiter-0.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:a7c7d785ae9dda68c2678532a5a1581347e9c15362ae9f6e68f3fdbfb64f2e49", size = 206176 },
+ { url = "https://files.pythonhosted.org/packages/2e/b0/279597e7a270e8d22623fea6c5d4eeac328e7d95c236ed51a2b884c54f70/jiter-0.10.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:e0588107ec8e11b6f5ef0e0d656fb2803ac6cf94a96b2b9fc675c0e3ab5e8644", size = 311617 },
+ { url = "https://files.pythonhosted.org/packages/91/e3/0916334936f356d605f54cc164af4060e3e7094364add445a3bc79335d46/jiter-0.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cafc4628b616dc32530c20ee53d71589816cf385dd9449633e910d596b1f5c8a", size = 318947 },
+ { url = "https://files.pythonhosted.org/packages/6a/8e/fd94e8c02d0e94539b7d669a7ebbd2776e51f329bb2c84d4385e8063a2ad/jiter-0.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:520ef6d981172693786a49ff5b09eda72a42e539f14788124a07530f785c3ad6", size = 344618 },
+ { url = "https://files.pythonhosted.org/packages/6f/b0/f9f0a2ec42c6e9c2e61c327824687f1e2415b767e1089c1d9135f43816bd/jiter-0.10.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:554dedfd05937f8fc45d17ebdf298fe7e0c77458232bcb73d9fbbf4c6455f5b3", size = 368829 },
+ { url = "https://files.pythonhosted.org/packages/e8/57/5bbcd5331910595ad53b9fd0c610392ac68692176f05ae48d6ce5c852967/jiter-0.10.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5bc299da7789deacf95f64052d97f75c16d4fc8c4c214a22bf8d859a4288a1c2", size = 491034 },
+ { url = "https://files.pythonhosted.org/packages/9b/be/c393df00e6e6e9e623a73551774449f2f23b6ec6a502a3297aeeece2c65a/jiter-0.10.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5161e201172de298a8a1baad95eb85db4fb90e902353b1f6a41d64ea64644e25", size = 388529 },
+ { url = "https://files.pythonhosted.org/packages/42/3e/df2235c54d365434c7f150b986a6e35f41ebdc2f95acea3036d99613025d/jiter-0.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e2227db6ba93cb3e2bf67c87e594adde0609f146344e8207e8730364db27041", size = 350671 },
+ { url = "https://files.pythonhosted.org/packages/c6/77/71b0b24cbcc28f55ab4dbfe029f9a5b73aeadaba677843fc6dc9ed2b1d0a/jiter-0.10.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:15acb267ea5e2c64515574b06a8bf393fbfee6a50eb1673614aa45f4613c0cca", size = 390864 },
+ { url = "https://files.pythonhosted.org/packages/6a/d3/ef774b6969b9b6178e1d1e7a89a3bd37d241f3d3ec5f8deb37bbd203714a/jiter-0.10.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:901b92f2e2947dc6dfcb52fd624453862e16665ea909a08398dde19c0731b7f4", size = 522989 },
+ { url = "https://files.pythonhosted.org/packages/0c/41/9becdb1d8dd5d854142f45a9d71949ed7e87a8e312b0bede2de849388cb9/jiter-0.10.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d0cb9a125d5a3ec971a094a845eadde2db0de85b33c9f13eb94a0c63d463879e", size = 513495 },
+ { url = "https://files.pythonhosted.org/packages/9c/36/3468e5a18238bdedae7c4d19461265b5e9b8e288d3f86cd89d00cbb48686/jiter-0.10.0-cp313-cp313-win32.whl", hash = "sha256:48a403277ad1ee208fb930bdf91745e4d2d6e47253eedc96e2559d1e6527006d", size = 211289 },
+ { url = "https://files.pythonhosted.org/packages/7e/07/1c96b623128bcb913706e294adb5f768fb7baf8db5e1338ce7b4ee8c78ef/jiter-0.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:75f9eb72ecb640619c29bf714e78c9c46c9c4eaafd644bf78577ede459f330d4", size = 205074 },
+ { url = "https://files.pythonhosted.org/packages/54/46/caa2c1342655f57d8f0f2519774c6d67132205909c65e9aa8255e1d7b4f4/jiter-0.10.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:28ed2a4c05a1f32ef0e1d24c2611330219fed727dae01789f4a335617634b1ca", size = 318225 },
+ { url = "https://files.pythonhosted.org/packages/43/84/c7d44c75767e18946219ba2d703a5a32ab37b0bc21886a97bc6062e4da42/jiter-0.10.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14a4c418b1ec86a195f1ca69da8b23e8926c752b685af665ce30777233dfe070", size = 350235 },
+ { url = "https://files.pythonhosted.org/packages/01/16/f5a0135ccd968b480daad0e6ab34b0c7c5ba3bc447e5088152696140dcb3/jiter-0.10.0-cp313-cp313t-win_amd64.whl", hash = "sha256:d7bfed2fe1fe0e4dda6ef682cee888ba444b21e7a6553e03252e4feb6cf0adca", size = 207278 },
+ { url = "https://files.pythonhosted.org/packages/1c/9b/1d646da42c3de6c2188fdaa15bce8ecb22b635904fc68be025e21249ba44/jiter-0.10.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:5e9251a5e83fab8d87799d3e1a46cb4b7f2919b895c6f4483629ed2446f66522", size = 310866 },
+ { url = "https://files.pythonhosted.org/packages/ad/0e/26538b158e8a7c7987e94e7aeb2999e2e82b1f9d2e1f6e9874ddf71ebda0/jiter-0.10.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:023aa0204126fe5b87ccbcd75c8a0d0261b9abdbbf46d55e7ae9f8e22424eeb8", size = 318772 },
+ { url = "https://files.pythonhosted.org/packages/7b/fb/d302893151caa1c2636d6574d213e4b34e31fd077af6050a9c5cbb42f6fb/jiter-0.10.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c189c4f1779c05f75fc17c0c1267594ed918996a231593a21a5ca5438445216", size = 344534 },
+ { url = "https://files.pythonhosted.org/packages/01/d8/5780b64a149d74e347c5128d82176eb1e3241b1391ac07935693466d6219/jiter-0.10.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:15720084d90d1098ca0229352607cd68256c76991f6b374af96f36920eae13c4", size = 369087 },
+ { url = "https://files.pythonhosted.org/packages/e8/5b/f235a1437445160e777544f3ade57544daf96ba7e96c1a5b24a6f7ac7004/jiter-0.10.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4f2fb68e5f1cfee30e2b2a09549a00683e0fde4c6a2ab88c94072fc33cb7426", size = 490694 },
+ { url = "https://files.pythonhosted.org/packages/85/a9/9c3d4617caa2ff89cf61b41e83820c27ebb3f7b5fae8a72901e8cd6ff9be/jiter-0.10.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ce541693355fc6da424c08b7edf39a2895f58d6ea17d92cc2b168d20907dee12", size = 388992 },
+ { url = "https://files.pythonhosted.org/packages/68/b1/344fd14049ba5c94526540af7eb661871f9c54d5f5601ff41a959b9a0bbd/jiter-0.10.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31c50c40272e189d50006ad5c73883caabb73d4e9748a688b216e85a9a9ca3b9", size = 351723 },
+ { url = "https://files.pythonhosted.org/packages/41/89/4c0e345041186f82a31aee7b9d4219a910df672b9fef26f129f0cda07a29/jiter-0.10.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fa3402a2ff9815960e0372a47b75c76979d74402448509ccd49a275fa983ef8a", size = 392215 },
+ { url = "https://files.pythonhosted.org/packages/55/58/ee607863e18d3f895feb802154a2177d7e823a7103f000df182e0f718b38/jiter-0.10.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:1956f934dca32d7bb647ea21d06d93ca40868b505c228556d3373cbd255ce853", size = 522762 },
+ { url = "https://files.pythonhosted.org/packages/15/d0/9123fb41825490d16929e73c212de9a42913d68324a8ce3c8476cae7ac9d/jiter-0.10.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:fcedb049bdfc555e261d6f65a6abe1d5ad68825b7202ccb9692636c70fcced86", size = 513427 },
+ { url = "https://files.pythonhosted.org/packages/d8/b3/2bd02071c5a2430d0b70403a34411fc519c2f227da7b03da9ba6a956f931/jiter-0.10.0-cp314-cp314-win32.whl", hash = "sha256:ac509f7eccca54b2a29daeb516fb95b6f0bd0d0d8084efaf8ed5dfc7b9f0b357", size = 210127 },
+ { url = "https://files.pythonhosted.org/packages/03/0c/5fe86614ea050c3ecd728ab4035534387cd41e7c1855ef6c031f1ca93e3f/jiter-0.10.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5ed975b83a2b8639356151cef5c0d597c68376fc4922b45d0eb384ac058cfa00", size = 318527 },
+ { url = "https://files.pythonhosted.org/packages/b3/4a/4175a563579e884192ba6e81725fc0448b042024419be8d83aa8a80a3f44/jiter-0.10.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa96f2abba33dc77f79b4cf791840230375f9534e5fac927ccceb58c5e604a5", size = 354213 },
+]
+
+[[package]]
+name = "jsonref"
+version = "1.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/aa/0d/c1f3277e90ccdb50d33ed5ba1ec5b3f0a242ed8c1b1a85d3afeb68464dca/jsonref-1.1.0.tar.gz", hash = "sha256:32fe8e1d85af0fdefbebce950af85590b22b60f9e95443176adbde4e1ecea552", size = 8814 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0c/ec/e1db9922bceb168197a558a2b8c03a7963f1afe93517ddd3cf99f202f996/jsonref-1.1.0-py3-none-any.whl", hash = "sha256:590dc7773df6c21cbf948b5dac07a72a251db28b0238ceecce0a2abfa8ec30a9", size = 9425 },
+]
+
+[[package]]
+name = "jsonschema"
+version = "4.25.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "attrs" },
+ { name = "jsonschema-specifications" },
+ { name = "referencing" },
+ { name = "rpds-py" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/d5/00/a297a868e9d0784450faa7365c2172a7d6110c763e30ba861867c32ae6a9/jsonschema-4.25.0.tar.gz", hash = "sha256:e63acf5c11762c0e6672ffb61482bdf57f0876684d8d249c0fe2d730d48bc55f", size = 356830 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/fe/54/c86cd8e011fe98803d7e382fd67c0df5ceab8d2b7ad8c5a81524f791551c/jsonschema-4.25.0-py3-none-any.whl", hash = "sha256:24c2e8da302de79c8b9382fee3e76b355e44d2a4364bb207159ce10b517bd716", size = 89184 },
+]
+
+[[package]]
+name = "jsonschema-specifications"
+version = "2025.4.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "referencing" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/bf/ce/46fbd9c8119cfc3581ee5643ea49464d168028cfb5caff5fc0596d0cf914/jsonschema_specifications-2025.4.1.tar.gz", hash = "sha256:630159c9f4dbea161a6a2205c3011cc4f18ff381b189fff48bb39b9bf26ae608", size = 15513 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/01/0e/b27cdbaccf30b890c40ed1da9fd4a3593a5cf94dae54fb34f8a4b74fcd3f/jsonschema_specifications-2025.4.1-py3-none-any.whl", hash = "sha256:4653bffbd6584f7de83a67e0d620ef16900b390ddc7939d56684d6c81e33f1af", size = 18437 },
+]
+
+[[package]]
+name = "jupyter-client"
+version = "8.6.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "jupyter-core" },
+ { name = "python-dateutil" },
+ { name = "pyzmq" },
+ { name = "tornado" },
+ { name = "traitlets" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/71/22/bf9f12fdaeae18019a468b68952a60fe6dbab5d67cd2a103cac7659b41ca/jupyter_client-8.6.3.tar.gz", hash = "sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419", size = 342019 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/11/85/b0394e0b6fcccd2c1eeefc230978a6f8cb0c5df1e4cd3e7625735a0d7d1e/jupyter_client-8.6.3-py3-none-any.whl", hash = "sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f", size = 106105 },
+]
+
+[[package]]
+name = "jupyter-core"
+version = "5.8.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "platformdirs" },
+ { name = "pywin32", marker = "platform_python_implementation != 'PyPy' and sys_platform == 'win32'" },
+ { name = "traitlets" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/99/1b/72906d554acfeb588332eaaa6f61577705e9ec752ddb486f302dafa292d9/jupyter_core-5.8.1.tar.gz", hash = "sha256:0a5f9706f70e64786b75acba995988915ebd4601c8a52e534a40b51c95f59941", size = 88923 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/2f/57/6bffd4b20b88da3800c5d691e0337761576ee688eb01299eae865689d2df/jupyter_core-5.8.1-py3-none-any.whl", hash = "sha256:c28d268fc90fb53f1338ded2eb410704c5449a358406e8a948b75706e24863d0", size = 28880 },
+]
+
+[[package]]
+name = "kubernetes"
+version = "28.1.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "certifi" },
+ { name = "google-auth" },
+ { name = "oauthlib" },
+ { name = "python-dateutil" },
+ { name = "pyyaml" },
+ { name = "requests" },
+ { name = "requests-oauthlib" },
+ { name = "six" },
+ { name = "urllib3" },
+ { name = "websocket-client" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/3c/5e/d27f39f447137a9a3d1f31142c77ce74bcedfda7dafe922d725c7ef2da33/kubernetes-28.1.0.tar.gz", hash = "sha256:1468069a573430fb1cb5ad22876868f57977930f80a6749405da31cd6086a7e9", size = 817854 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f5/6a/1f69c2d8b1ff03f8d8e10d801f4ac3016ed4c1b00aa9795732c6ec900bba/kubernetes-28.1.0-py2.py3-none-any.whl", hash = "sha256:10f56f8160dcb73647f15fafda268e7f60cf7dbc9f8e46d52fcd46d3beb0c18d", size = 1566315 },
+]
+
+[[package]]
+name = "litellm"
+version = "1.74.6"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "aiohttp" },
+ { name = "click" },
+ { name = "httpx" },
+ { name = "importlib-metadata" },
+ { name = "jinja2" },
+ { name = "jsonschema" },
+ { name = "openai" },
+ { name = "pydantic" },
+ { name = "python-dotenv" },
+ { name = "tiktoken" },
+ { name = "tokenizers" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/06/e6/286741f91cdc6c14614a1b60696c265d6b83e784e978512d852d4dc29872/litellm-1.74.6.tar.gz", hash = "sha256:297ad20bb51ab0c3f7f0d5b442954b02eec382adfaf17f730827d02f51b944b1", size = 9423274 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/cf/2d/bcfd998134d35445e9bdcbfe999081892b84eb34901863717b3ac456344e/litellm-1.74.6-py3-none-any.whl", hash = "sha256:d2581bc5d4d6427b265496ff662eb14f673e904faaf33c8c4d37a29a051af772", size = 8646736 },
+]
+
+[[package]]
+name = "markdown-it-py"
+version = "3.0.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "mdurl" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528 },
+]
+
+[[package]]
+name = "markupsafe"
+version = "3.0.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274 },
+ { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348 },
+ { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149 },
+ { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118 },
+ { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993 },
+ { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178 },
+ { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319 },
+ { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352 },
+ { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097 },
+ { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601 },
+ { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274 },
+ { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352 },
+ { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122 },
+ { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085 },
+ { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978 },
+ { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208 },
+ { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357 },
+ { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344 },
+ { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101 },
+ { url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603 },
+ { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510 },
+ { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486 },
+ { url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480 },
+ { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914 },
+ { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796 },
+ { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473 },
+ { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114 },
+ { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098 },
+ { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208 },
+ { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739 },
+]
+
+[[package]]
+name = "matplotlib-inline"
+version = "0.1.7"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "traitlets" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/99/5b/a36a337438a14116b16480db471ad061c36c3694df7c2084a0da7ba538b7/matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90", size = 8159 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/8f/8e/9ad090d3553c280a8060fbf6e24dc1c0c29704ee7d1c372f0c174aa59285/matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca", size = 9899 },
+]
+
+[[package]]
+name = "mcp"
+version = "1.12.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "anyio" },
+ { name = "httpx" },
+ { name = "httpx-sse" },
+ { name = "jsonschema" },
+ { name = "pydantic" },
+ { name = "pydantic-settings" },
+ { name = "python-multipart" },
+ { name = "pywin32", marker = "sys_platform == 'win32'" },
+ { name = "sse-starlette" },
+ { name = "starlette" },
+ { name = "uvicorn", marker = "sys_platform != 'emscripten'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/45/94/caa0f4754e2437f7033068989f13fee784856f95870c786b0b5c2c0f511e/mcp-1.12.0.tar.gz", hash = "sha256:853f6b17a3f31ea6e2f278c2ec7d3b38457bc80c7c2c675260dd7f04a6fd0e70", size = 424678 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ed/da/c7eaab6a58f1034de115b7902141ad8f81b4f3bbf7dc0cc267594947a4d7/mcp-1.12.0-py3-none-any.whl", hash = "sha256:19a498b2bf273283e463b4dd1ed83f791fbba5c25bfa16b8b34cfd5571673e7f", size = 158470 },
+]
+
+[package.optional-dependencies]
+cli = [
+ { name = "python-dotenv" },
+ { name = "typer" },
+]
+
+[[package]]
+name = "mdurl"
+version = "0.1.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 },
+]
+
+[[package]]
+name = "multidict"
+version = "6.6.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/3d/2c/5dad12e82fbdf7470f29bff2171484bf07cb3b16ada60a6589af8f376440/multidict-6.6.3.tar.gz", hash = "sha256:798a9eb12dab0a6c2e29c1de6f3468af5cb2da6053a20dfa3344907eed0937cc", size = 101006 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0e/a0/6b57988ea102da0623ea814160ed78d45a2645e4bbb499c2896d12833a70/multidict-6.6.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:056bebbeda16b2e38642d75e9e5310c484b7c24e3841dc0fb943206a72ec89d6", size = 76514 },
+ { url = "https://files.pythonhosted.org/packages/07/7a/d1e92665b0850c6c0508f101f9cf0410c1afa24973e1115fe9c6a185ebf7/multidict-6.6.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e5f481cccb3c5c5e5de5d00b5141dc589c1047e60d07e85bbd7dea3d4580d63f", size = 45394 },
+ { url = "https://files.pythonhosted.org/packages/52/6f/dd104490e01be6ef8bf9573705d8572f8c2d2c561f06e3826b081d9e6591/multidict-6.6.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:10bea2ee839a759ee368b5a6e47787f399b41e70cf0c20d90dfaf4158dfb4e55", size = 43590 },
+ { url = "https://files.pythonhosted.org/packages/44/fe/06e0e01b1b0611e6581b7fd5a85b43dacc08b6cea3034f902f383b0873e5/multidict-6.6.3-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:2334cfb0fa9549d6ce2c21af2bfbcd3ac4ec3646b1b1581c88e3e2b1779ec92b", size = 237292 },
+ { url = "https://files.pythonhosted.org/packages/ce/71/4f0e558fb77696b89c233c1ee2d92f3e1d5459070a0e89153c9e9e804186/multidict-6.6.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8fee016722550a2276ca2cb5bb624480e0ed2bd49125b2b73b7010b9090e888", size = 258385 },
+ { url = "https://files.pythonhosted.org/packages/e3/25/cca0e68228addad24903801ed1ab42e21307a1b4b6dd2cf63da5d3ae082a/multidict-6.6.3-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5511cb35f5c50a2db21047c875eb42f308c5583edf96bd8ebf7d770a9d68f6d", size = 242328 },
+ { url = "https://files.pythonhosted.org/packages/6e/a3/46f2d420d86bbcb8fe660b26a10a219871a0fbf4d43cb846a4031533f3e0/multidict-6.6.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:712b348f7f449948e0a6c4564a21c7db965af900973a67db432d724619b3c680", size = 268057 },
+ { url = "https://files.pythonhosted.org/packages/9e/73/1c743542fe00794a2ec7466abd3f312ccb8fad8dff9f36d42e18fb1ec33e/multidict-6.6.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e4e15d2138ee2694e038e33b7c3da70e6b0ad8868b9f8094a72e1414aeda9c1a", size = 269341 },
+ { url = "https://files.pythonhosted.org/packages/a4/11/6ec9dcbe2264b92778eeb85407d1df18812248bf3506a5a1754bc035db0c/multidict-6.6.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8df25594989aebff8a130f7899fa03cbfcc5d2b5f4a461cf2518236fe6f15961", size = 256081 },
+ { url = "https://files.pythonhosted.org/packages/9b/2b/631b1e2afeb5f1696846d747d36cda075bfdc0bc7245d6ba5c319278d6c4/multidict-6.6.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:159ca68bfd284a8860f8d8112cf0521113bffd9c17568579e4d13d1f1dc76b65", size = 253581 },
+ { url = "https://files.pythonhosted.org/packages/bf/0e/7e3b93f79efeb6111d3bf9a1a69e555ba1d07ad1c11bceb56b7310d0d7ee/multidict-6.6.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:e098c17856a8c9ade81b4810888c5ad1914099657226283cab3062c0540b0643", size = 250750 },
+ { url = "https://files.pythonhosted.org/packages/ad/9e/086846c1d6601948e7de556ee464a2d4c85e33883e749f46b9547d7b0704/multidict-6.6.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:67c92ed673049dec52d7ed39f8cf9ebbadf5032c774058b4406d18c8f8fe7063", size = 251548 },
+ { url = "https://files.pythonhosted.org/packages/8c/7b/86ec260118e522f1a31550e87b23542294880c97cfbf6fb18cc67b044c66/multidict-6.6.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:bd0578596e3a835ef451784053cfd327d607fc39ea1a14812139339a18a0dbc3", size = 262718 },
+ { url = "https://files.pythonhosted.org/packages/8c/bd/22ce8f47abb0be04692c9fc4638508b8340987b18691aa7775d927b73f72/multidict-6.6.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:346055630a2df2115cd23ae271910b4cae40f4e336773550dca4889b12916e75", size = 259603 },
+ { url = "https://files.pythonhosted.org/packages/07/9c/91b7ac1691be95cd1f4a26e36a74b97cda6aa9820632d31aab4410f46ebd/multidict-6.6.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:555ff55a359302b79de97e0468e9ee80637b0de1fce77721639f7cd9440b3a10", size = 251351 },
+ { url = "https://files.pythonhosted.org/packages/6f/5c/4d7adc739884f7a9fbe00d1eac8c034023ef8bad71f2ebe12823ca2e3649/multidict-6.6.3-cp312-cp312-win32.whl", hash = "sha256:73ab034fb8d58ff85c2bcbadc470efc3fafeea8affcf8722855fb94557f14cc5", size = 41860 },
+ { url = "https://files.pythonhosted.org/packages/6a/a3/0fbc7afdf7cb1aa12a086b02959307848eb6bcc8f66fcb66c0cb57e2a2c1/multidict-6.6.3-cp312-cp312-win_amd64.whl", hash = "sha256:04cbcce84f63b9af41bad04a54d4cc4e60e90c35b9e6ccb130be2d75b71f8c17", size = 45982 },
+ { url = "https://files.pythonhosted.org/packages/b8/95/8c825bd70ff9b02462dc18d1295dd08d3e9e4eb66856d292ffa62cfe1920/multidict-6.6.3-cp312-cp312-win_arm64.whl", hash = "sha256:0f1130b896ecb52d2a1e615260f3ea2af55fa7dc3d7c3003ba0c3121a759b18b", size = 43210 },
+ { url = "https://files.pythonhosted.org/packages/52/1d/0bebcbbb4f000751fbd09957257903d6e002943fc668d841a4cf2fb7f872/multidict-6.6.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:540d3c06d48507357a7d57721e5094b4f7093399a0106c211f33540fdc374d55", size = 75843 },
+ { url = "https://files.pythonhosted.org/packages/07/8f/cbe241b0434cfe257f65c2b1bcf9e8d5fb52bc708c5061fb29b0fed22bdf/multidict-6.6.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9c19cea2a690f04247d43f366d03e4eb110a0dc4cd1bbeee4d445435428ed35b", size = 45053 },
+ { url = "https://files.pythonhosted.org/packages/32/d2/0b3b23f9dbad5b270b22a3ac3ea73ed0a50ef2d9a390447061178ed6bdb8/multidict-6.6.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7af039820cfd00effec86bda5d8debef711a3e86a1d3772e85bea0f243a4bd65", size = 43273 },
+ { url = "https://files.pythonhosted.org/packages/fd/fe/6eb68927e823999e3683bc49678eb20374ba9615097d085298fd5b386564/multidict-6.6.3-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:500b84f51654fdc3944e936f2922114349bf8fdcac77c3092b03449f0e5bc2b3", size = 237124 },
+ { url = "https://files.pythonhosted.org/packages/e7/ab/320d8507e7726c460cb77117848b3834ea0d59e769f36fdae495f7669929/multidict-6.6.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f3fc723ab8a5c5ed6c50418e9bfcd8e6dceba6c271cee6728a10a4ed8561520c", size = 256892 },
+ { url = "https://files.pythonhosted.org/packages/76/60/38ee422db515ac69834e60142a1a69111ac96026e76e8e9aa347fd2e4591/multidict-6.6.3-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:94c47ea3ade005b5976789baaed66d4de4480d0a0bf31cef6edaa41c1e7b56a6", size = 240547 },
+ { url = "https://files.pythonhosted.org/packages/27/fb/905224fde2dff042b030c27ad95a7ae744325cf54b890b443d30a789b80e/multidict-6.6.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:dbc7cf464cc6d67e83e136c9f55726da3a30176f020a36ead246eceed87f1cd8", size = 266223 },
+ { url = "https://files.pythonhosted.org/packages/76/35/dc38ab361051beae08d1a53965e3e1a418752fc5be4d3fb983c5582d8784/multidict-6.6.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:900eb9f9da25ada070f8ee4a23f884e0ee66fe4e1a38c3af644256a508ad81ca", size = 267262 },
+ { url = "https://files.pythonhosted.org/packages/1f/a3/0a485b7f36e422421b17e2bbb5a81c1af10eac1d4476f2ff92927c730479/multidict-6.6.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7c6df517cf177da5d47ab15407143a89cd1a23f8b335f3a28d57e8b0a3dbb884", size = 254345 },
+ { url = "https://files.pythonhosted.org/packages/b4/59/bcdd52c1dab7c0e0d75ff19cac751fbd5f850d1fc39172ce809a74aa9ea4/multidict-6.6.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4ef421045f13879e21c994b36e728d8e7d126c91a64b9185810ab51d474f27e7", size = 252248 },
+ { url = "https://files.pythonhosted.org/packages/bb/a4/2d96aaa6eae8067ce108d4acee6f45ced5728beda55c0f02ae1072c730d1/multidict-6.6.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:6c1e61bb4f80895c081790b6b09fa49e13566df8fbff817da3f85b3a8192e36b", size = 250115 },
+ { url = "https://files.pythonhosted.org/packages/25/d2/ed9f847fa5c7d0677d4f02ea2c163d5e48573de3f57bacf5670e43a5ffaa/multidict-6.6.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e5e8523bb12d7623cd8300dbd91b9e439a46a028cd078ca695eb66ba31adee3c", size = 249649 },
+ { url = "https://files.pythonhosted.org/packages/1f/af/9155850372563fc550803d3f25373308aa70f59b52cff25854086ecb4a79/multidict-6.6.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:ef58340cc896219e4e653dade08fea5c55c6df41bcc68122e3be3e9d873d9a7b", size = 261203 },
+ { url = "https://files.pythonhosted.org/packages/36/2f/c6a728f699896252cf309769089568a33c6439626648843f78743660709d/multidict-6.6.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fc9dc435ec8699e7b602b94fe0cd4703e69273a01cbc34409af29e7820f777f1", size = 258051 },
+ { url = "https://files.pythonhosted.org/packages/d0/60/689880776d6b18fa2b70f6cc74ff87dd6c6b9b47bd9cf74c16fecfaa6ad9/multidict-6.6.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9e864486ef4ab07db5e9cb997bad2b681514158d6954dd1958dfb163b83d53e6", size = 249601 },
+ { url = "https://files.pythonhosted.org/packages/75/5e/325b11f2222a549019cf2ef879c1f81f94a0d40ace3ef55cf529915ba6cc/multidict-6.6.3-cp313-cp313-win32.whl", hash = "sha256:5633a82fba8e841bc5c5c06b16e21529573cd654f67fd833650a215520a6210e", size = 41683 },
+ { url = "https://files.pythonhosted.org/packages/b1/ad/cf46e73f5d6e3c775cabd2a05976547f3f18b39bee06260369a42501f053/multidict-6.6.3-cp313-cp313-win_amd64.whl", hash = "sha256:e93089c1570a4ad54c3714a12c2cef549dc9d58e97bcded193d928649cab78e9", size = 45811 },
+ { url = "https://files.pythonhosted.org/packages/c5/c9/2e3fe950db28fb7c62e1a5f46e1e38759b072e2089209bc033c2798bb5ec/multidict-6.6.3-cp313-cp313-win_arm64.whl", hash = "sha256:c60b401f192e79caec61f166da9c924e9f8bc65548d4246842df91651e83d600", size = 43056 },
+ { url = "https://files.pythonhosted.org/packages/3a/58/aaf8114cf34966e084a8cc9517771288adb53465188843d5a19862cb6dc3/multidict-6.6.3-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:02fd8f32d403a6ff13864b0851f1f523d4c988051eea0471d4f1fd8010f11134", size = 82811 },
+ { url = "https://files.pythonhosted.org/packages/71/af/5402e7b58a1f5b987a07ad98f2501fdba2a4f4b4c30cf114e3ce8db64c87/multidict-6.6.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f3aa090106b1543f3f87b2041eef3c156c8da2aed90c63a2fbed62d875c49c37", size = 48304 },
+ { url = "https://files.pythonhosted.org/packages/39/65/ab3c8cafe21adb45b24a50266fd747147dec7847425bc2a0f6934b3ae9ce/multidict-6.6.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e924fb978615a5e33ff644cc42e6aa241effcf4f3322c09d4f8cebde95aff5f8", size = 46775 },
+ { url = "https://files.pythonhosted.org/packages/49/ba/9fcc1b332f67cc0c0c8079e263bfab6660f87fe4e28a35921771ff3eea0d/multidict-6.6.3-cp313-cp313t-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:b9fe5a0e57c6dbd0e2ce81ca66272282c32cd11d31658ee9553849d91289e1c1", size = 229773 },
+ { url = "https://files.pythonhosted.org/packages/a4/14/0145a251f555f7c754ce2dcbcd012939bbd1f34f066fa5d28a50e722a054/multidict-6.6.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b24576f208793ebae00280c59927c3b7c2a3b1655e443a25f753c4611bc1c373", size = 250083 },
+ { url = "https://files.pythonhosted.org/packages/9e/d4/d5c0bd2bbb173b586c249a151a26d2fb3ec7d53c96e42091c9fef4e1f10c/multidict-6.6.3-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:135631cb6c58eac37d7ac0df380294fecdc026b28837fa07c02e459c7fb9c54e", size = 228980 },
+ { url = "https://files.pythonhosted.org/packages/21/32/c9a2d8444a50ec48c4733ccc67254100c10e1c8ae8e40c7a2d2183b59b97/multidict-6.6.3-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:274d416b0df887aef98f19f21578653982cfb8a05b4e187d4a17103322eeaf8f", size = 257776 },
+ { url = "https://files.pythonhosted.org/packages/68/d0/14fa1699f4ef629eae08ad6201c6b476098f5efb051b296f4c26be7a9fdf/multidict-6.6.3-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e252017a817fad7ce05cafbe5711ed40faeb580e63b16755a3a24e66fa1d87c0", size = 256882 },
+ { url = "https://files.pythonhosted.org/packages/da/88/84a27570fbe303c65607d517a5f147cd2fc046c2d1da02b84b17b9bdc2aa/multidict-6.6.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e4cc8d848cd4fe1cdee28c13ea79ab0ed37fc2e89dd77bac86a2e7959a8c3bc", size = 247816 },
+ { url = "https://files.pythonhosted.org/packages/1c/60/dca352a0c999ce96a5d8b8ee0b2b9f729dcad2e0b0c195f8286269a2074c/multidict-6.6.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9e236a7094b9c4c1b7585f6b9cca34b9d833cf079f7e4c49e6a4a6ec9bfdc68f", size = 245341 },
+ { url = "https://files.pythonhosted.org/packages/50/ef/433fa3ed06028f03946f3993223dada70fb700f763f70c00079533c34578/multidict-6.6.3-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:e0cb0ab69915c55627c933f0b555a943d98ba71b4d1c57bc0d0a66e2567c7471", size = 235854 },
+ { url = "https://files.pythonhosted.org/packages/1b/1f/487612ab56fbe35715320905215a57fede20de7db40a261759690dc80471/multidict-6.6.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:81ef2f64593aba09c5212a3d0f8c906a0d38d710a011f2f42759704d4557d3f2", size = 243432 },
+ { url = "https://files.pythonhosted.org/packages/da/6f/ce8b79de16cd885c6f9052c96a3671373d00c59b3ee635ea93e6e81b8ccf/multidict-6.6.3-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:b9cbc60010de3562545fa198bfc6d3825df430ea96d2cc509c39bd71e2e7d648", size = 252731 },
+ { url = "https://files.pythonhosted.org/packages/bb/fe/a2514a6aba78e5abefa1624ca85ae18f542d95ac5cde2e3815a9fbf369aa/multidict-6.6.3-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:70d974eaaa37211390cd02ef93b7e938de564bbffa866f0b08d07e5e65da783d", size = 247086 },
+ { url = "https://files.pythonhosted.org/packages/8c/22/b788718d63bb3cce752d107a57c85fcd1a212c6c778628567c9713f9345a/multidict-6.6.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:3713303e4a6663c6d01d648a68f2848701001f3390a030edaaf3fc949c90bf7c", size = 243338 },
+ { url = "https://files.pythonhosted.org/packages/22/d6/fdb3d0670819f2228f3f7d9af613d5e652c15d170c83e5f1c94fbc55a25b/multidict-6.6.3-cp313-cp313t-win32.whl", hash = "sha256:639ecc9fe7cd73f2495f62c213e964843826f44505a3e5d82805aa85cac6f89e", size = 47812 },
+ { url = "https://files.pythonhosted.org/packages/b6/d6/a9d2c808f2c489ad199723197419207ecbfbc1776f6e155e1ecea9c883aa/multidict-6.6.3-cp313-cp313t-win_amd64.whl", hash = "sha256:9f97e181f344a0ef3881b573d31de8542cc0dbc559ec68c8f8b5ce2c2e91646d", size = 53011 },
+ { url = "https://files.pythonhosted.org/packages/f2/40/b68001cba8188dd267590a111f9661b6256debc327137667e832bf5d66e8/multidict-6.6.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ce8b7693da41a3c4fde5871c738a81490cea5496c671d74374c8ab889e1834fb", size = 45254 },
+ { url = "https://files.pythonhosted.org/packages/d8/30/9aec301e9772b098c1f5c0ca0279237c9766d94b97802e9888010c64b0ed/multidict-6.6.3-py3-none-any.whl", hash = "sha256:8db10f29c7541fc5da4defd8cd697e1ca429db743fa716325f236079b96f775a", size = 12313 },
+]
+
+[[package]]
+name = "nest-asyncio"
+version = "1.6.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/83/f8/51569ac65d696c8ecbee95938f89d4abf00f47d58d48f6fbabfe8f0baefe/nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe", size = 7418 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a0/c4/c2971a3ba4c6103a3d10c4b0f24f461ddc027f0f09763220cf35ca1401b3/nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c", size = 5195 },
+]
+
+[[package]]
+name = "nexus-rpc"
+version = "1.1.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ef/66/540687556bd28cf1ec370cc6881456203dfddb9dab047b8979c6865b5984/nexus_rpc-1.1.0.tar.gz", hash = "sha256:d65ad6a2f54f14e53ebe39ee30555eaeb894102437125733fb13034a04a44553", size = 77383 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/bf/2f/9e9d0dcaa4c6ffa22b7aa31069a8a264c753ff8027b36af602cce038c92f/nexus_rpc-1.1.0-py3-none-any.whl", hash = "sha256:d1b007af2aba186a27e736f8eaae39c03aed05b488084ff6c3d1785c9ba2ad38", size = 27743 },
+]
+
+[[package]]
+name = "oauthlib"
+version = "3.3.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/0b/5f/19930f824ffeb0ad4372da4812c50edbd1434f678c90c2733e1188edfc63/oauthlib-3.3.1.tar.gz", hash = "sha256:0f0f8aa759826a193cf66c12ea1af1637f87b9b4622d46e866952bb022e538c9", size = 185918 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/be/9c/92789c596b8df838baa98fa71844d84283302f7604ed565dafe5a6b5041a/oauthlib-3.3.1-py3-none-any.whl", hash = "sha256:88119c938d2b8fb88561af5f6ee0eec8cc8d552b7bb1f712743136eb7523b7a1", size = 160065 },
+]
+
+[[package]]
+name = "openai"
+version = "1.97.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "anyio" },
+ { name = "distro" },
+ { name = "httpx" },
+ { name = "jiter" },
+ { name = "pydantic" },
+ { name = "sniffio" },
+ { name = "tqdm" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/e0/c6/b8d66e4f3b95493a8957065b24533333c927dc23817abe397f13fe589c6e/openai-1.97.0.tar.gz", hash = "sha256:0be349569ccaa4fb54f97bb808423fd29ccaeb1246ee1be762e0c81a47bae0aa", size = 493850 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/8a/91/1f1cf577f745e956b276a8b1d3d76fa7a6ee0c2b05db3b001b900f2c71db/openai-1.97.0-py3-none-any.whl", hash = "sha256:a1c24d96f4609f3f7f51c9e1c2606d97cc6e334833438659cfd687e9c972c610", size = 764953 },
+]
+
+[[package]]
+name = "openai-agents"
+version = "0.2.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "griffe" },
+ { name = "mcp" },
+ { name = "openai" },
+ { name = "pydantic" },
+ { name = "requests" },
+ { name = "types-requests" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/45/e2/b23f1076fb16f672d8dced4e4fa055e0ee824b35c1b11c2c23106ce8d99d/openai_agents-0.2.2.tar.gz", hash = "sha256:8a40971db5113be8f1a5842f30cd9f5ebec29848124f64f95820b6d52096241c", size = 1453555 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d5/00/5ab04ee5d49eebc08e835d79cbaf0cbde1fef7aff8c3f1bbb9e967c9ff23/openai_agents-0.2.2-py3-none-any.whl", hash = "sha256:cff8a2556c8e69e05aff3ab9eb40ceff05494c60297a682964dbc8e5309a3686", size = 161083 },
+]
+
+[[package]]
+name = "packaging"
+version = "25.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469 },
+]
+
+[[package]]
+name = "parso"
+version = "0.8.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/66/94/68e2e17afaa9169cf6412ab0f28623903be73d1b32e208d9e8e541bb086d/parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d", size = 400609 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c6/ac/dac4a63f978e4dcb3c6d3a78c4d8e0192a113d288502a1216950c41b1027/parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18", size = 103650 },
+]
+
+[[package]]
+name = "pexpect"
+version = "4.9.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "ptyprocess" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523", size = 63772 },
+]
+
+[[package]]
+name = "platformdirs"
+version = "4.3.8"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/fe/8b/3c73abc9c759ecd3f1f7ceff6685840859e8070c4d947c93fae71f6a0bf2/platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc", size = 21362 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/fe/39/979e8e21520d4e47a0bbe349e2713c0aac6f3d853d0e5b34d76206c439aa/platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4", size = 18567 },
+]
+
+[[package]]
+name = "pluggy"
+version = "1.6.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538 },
+]
+
+[[package]]
+name = "prompt-toolkit"
+version = "3.0.51"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "wcwidth" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/bb/6e/9d084c929dfe9e3bfe0c6a47e31f78a25c54627d64a66e884a8bf5474f1c/prompt_toolkit-3.0.51.tar.gz", hash = "sha256:931a162e3b27fc90c86f1b48bb1fb2c528c2761475e57c9c06de13311c7b54ed", size = 428940 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ce/4f/5249960887b1fbe561d9ff265496d170b55a735b76724f10ef19f9e40716/prompt_toolkit-3.0.51-py3-none-any.whl", hash = "sha256:52742911fde84e2d423e2f9a4cf1de7d7ac4e51958f648d9540e0fb8db077b07", size = 387810 },
+]
+
+[[package]]
+name = "propcache"
+version = "0.3.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a6/16/43264e4a779dd8588c21a70f0709665ee8f611211bdd2c87d952cfa7c776/propcache-0.3.2.tar.gz", hash = "sha256:20d7d62e4e7ef05f221e0db2856b979540686342e7dd9973b815599c7057e168", size = 44139 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a8/42/9ca01b0a6f48e81615dca4765a8f1dd2c057e0540f6116a27dc5ee01dfb6/propcache-0.3.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8de106b6c84506b31c27168582cd3cb3000a6412c16df14a8628e5871ff83c10", size = 73674 },
+ { url = "https://files.pythonhosted.org/packages/af/6e/21293133beb550f9c901bbece755d582bfaf2176bee4774000bd4dd41884/propcache-0.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:28710b0d3975117239c76600ea351934ac7b5ff56e60953474342608dbbb6154", size = 43570 },
+ { url = "https://files.pythonhosted.org/packages/0c/c8/0393a0a3a2b8760eb3bde3c147f62b20044f0ddac81e9d6ed7318ec0d852/propcache-0.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce26862344bdf836650ed2487c3d724b00fbfec4233a1013f597b78c1cb73615", size = 43094 },
+ { url = "https://files.pythonhosted.org/packages/37/2c/489afe311a690399d04a3e03b069225670c1d489eb7b044a566511c1c498/propcache-0.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bca54bd347a253af2cf4544bbec232ab982f4868de0dd684246b67a51bc6b1db", size = 226958 },
+ { url = "https://files.pythonhosted.org/packages/9d/ca/63b520d2f3d418c968bf596839ae26cf7f87bead026b6192d4da6a08c467/propcache-0.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55780d5e9a2ddc59711d727226bb1ba83a22dd32f64ee15594b9392b1f544eb1", size = 234894 },
+ { url = "https://files.pythonhosted.org/packages/11/60/1d0ed6fff455a028d678df30cc28dcee7af77fa2b0e6962ce1df95c9a2a9/propcache-0.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:035e631be25d6975ed87ab23153db6a73426a48db688070d925aa27e996fe93c", size = 233672 },
+ { url = "https://files.pythonhosted.org/packages/37/7c/54fd5301ef38505ab235d98827207176a5c9b2aa61939b10a460ca53e123/propcache-0.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee6f22b6eaa39297c751d0e80c0d3a454f112f5c6481214fcf4c092074cecd67", size = 224395 },
+ { url = "https://files.pythonhosted.org/packages/ee/1a/89a40e0846f5de05fdc6779883bf46ba980e6df4d2ff8fb02643de126592/propcache-0.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ca3aee1aa955438c4dba34fc20a9f390e4c79967257d830f137bd5a8a32ed3b", size = 212510 },
+ { url = "https://files.pythonhosted.org/packages/5e/33/ca98368586c9566a6b8d5ef66e30484f8da84c0aac3f2d9aec6d31a11bd5/propcache-0.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7a4f30862869fa2b68380d677cc1c5fcf1e0f2b9ea0cf665812895c75d0ca3b8", size = 222949 },
+ { url = "https://files.pythonhosted.org/packages/ba/11/ace870d0aafe443b33b2f0b7efdb872b7c3abd505bfb4890716ad7865e9d/propcache-0.3.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b77ec3c257d7816d9f3700013639db7491a434644c906a2578a11daf13176251", size = 217258 },
+ { url = "https://files.pythonhosted.org/packages/5b/d2/86fd6f7adffcfc74b42c10a6b7db721d1d9ca1055c45d39a1a8f2a740a21/propcache-0.3.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:cab90ac9d3f14b2d5050928483d3d3b8fb6b4018893fc75710e6aa361ecb2474", size = 213036 },
+ { url = "https://files.pythonhosted.org/packages/07/94/2d7d1e328f45ff34a0a284cf5a2847013701e24c2a53117e7c280a4316b3/propcache-0.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0b504d29f3c47cf6b9e936c1852246c83d450e8e063d50562115a6be6d3a2535", size = 227684 },
+ { url = "https://files.pythonhosted.org/packages/b7/05/37ae63a0087677e90b1d14710e532ff104d44bc1efa3b3970fff99b891dc/propcache-0.3.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:ce2ac2675a6aa41ddb2a0c9cbff53780a617ac3d43e620f8fd77ba1c84dcfc06", size = 234562 },
+ { url = "https://files.pythonhosted.org/packages/a4/7c/3f539fcae630408d0bd8bf3208b9a647ccad10976eda62402a80adf8fc34/propcache-0.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b4239611205294cc433845b914131b2a1f03500ff3c1ed093ed216b82621e1", size = 222142 },
+ { url = "https://files.pythonhosted.org/packages/7c/d2/34b9eac8c35f79f8a962546b3e97e9d4b990c420ee66ac8255d5d9611648/propcache-0.3.2-cp312-cp312-win32.whl", hash = "sha256:df4a81b9b53449ebc90cc4deefb052c1dd934ba85012aa912c7ea7b7e38b60c1", size = 37711 },
+ { url = "https://files.pythonhosted.org/packages/19/61/d582be5d226cf79071681d1b46b848d6cb03d7b70af7063e33a2787eaa03/propcache-0.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7046e79b989d7fe457bb755844019e10f693752d169076138abf17f31380800c", size = 41479 },
+ { url = "https://files.pythonhosted.org/packages/dc/d1/8c747fafa558c603c4ca19d8e20b288aa0c7cda74e9402f50f31eb65267e/propcache-0.3.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ca592ed634a73ca002967458187109265e980422116c0a107cf93d81f95af945", size = 71286 },
+ { url = "https://files.pythonhosted.org/packages/61/99/d606cb7986b60d89c36de8a85d58764323b3a5ff07770a99d8e993b3fa73/propcache-0.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9ecb0aad4020e275652ba3975740f241bd12a61f1a784df044cf7477a02bc252", size = 42425 },
+ { url = "https://files.pythonhosted.org/packages/8c/96/ef98f91bbb42b79e9bb82bdd348b255eb9d65f14dbbe3b1594644c4073f7/propcache-0.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7f08f1cc28bd2eade7a8a3d2954ccc673bb02062e3e7da09bc75d843386b342f", size = 41846 },
+ { url = "https://files.pythonhosted.org/packages/5b/ad/3f0f9a705fb630d175146cd7b1d2bf5555c9beaed54e94132b21aac098a6/propcache-0.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1a342c834734edb4be5ecb1e9fb48cb64b1e2320fccbd8c54bf8da8f2a84c33", size = 208871 },
+ { url = "https://files.pythonhosted.org/packages/3a/38/2085cda93d2c8b6ec3e92af2c89489a36a5886b712a34ab25de9fbca7992/propcache-0.3.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a544caaae1ac73f1fecfae70ded3e93728831affebd017d53449e3ac052ac1e", size = 215720 },
+ { url = "https://files.pythonhosted.org/packages/61/c1/d72ea2dc83ac7f2c8e182786ab0fc2c7bd123a1ff9b7975bee671866fe5f/propcache-0.3.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:310d11aa44635298397db47a3ebce7db99a4cc4b9bbdfcf6c98a60c8d5261cf1", size = 215203 },
+ { url = "https://files.pythonhosted.org/packages/af/81/b324c44ae60c56ef12007105f1460d5c304b0626ab0cc6b07c8f2a9aa0b8/propcache-0.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c1396592321ac83157ac03a2023aa6cc4a3cc3cfdecb71090054c09e5a7cce3", size = 206365 },
+ { url = "https://files.pythonhosted.org/packages/09/73/88549128bb89e66d2aff242488f62869014ae092db63ccea53c1cc75a81d/propcache-0.3.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cabf5b5902272565e78197edb682017d21cf3b550ba0460ee473753f28d23c1", size = 196016 },
+ { url = "https://files.pythonhosted.org/packages/b9/3f/3bdd14e737d145114a5eb83cb172903afba7242f67c5877f9909a20d948d/propcache-0.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0a2f2235ac46a7aa25bdeb03a9e7060f6ecbd213b1f9101c43b3090ffb971ef6", size = 205596 },
+ { url = "https://files.pythonhosted.org/packages/0f/ca/2f4aa819c357d3107c3763d7ef42c03980f9ed5c48c82e01e25945d437c1/propcache-0.3.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:92b69e12e34869a6970fd2f3da91669899994b47c98f5d430b781c26f1d9f387", size = 200977 },
+ { url = "https://files.pythonhosted.org/packages/cd/4a/e65276c7477533c59085251ae88505caf6831c0e85ff8b2e31ebcbb949b1/propcache-0.3.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:54e02207c79968ebbdffc169591009f4474dde3b4679e16634d34c9363ff56b4", size = 197220 },
+ { url = "https://files.pythonhosted.org/packages/7c/54/fc7152e517cf5578278b242396ce4d4b36795423988ef39bb8cd5bf274c8/propcache-0.3.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4adfb44cb588001f68c5466579d3f1157ca07f7504fc91ec87862e2b8e556b88", size = 210642 },
+ { url = "https://files.pythonhosted.org/packages/b9/80/abeb4a896d2767bf5f1ea7b92eb7be6a5330645bd7fb844049c0e4045d9d/propcache-0.3.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fd3e6019dc1261cd0291ee8919dd91fbab7b169bb76aeef6c716833a3f65d206", size = 212789 },
+ { url = "https://files.pythonhosted.org/packages/b3/db/ea12a49aa7b2b6d68a5da8293dcf50068d48d088100ac016ad92a6a780e6/propcache-0.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4c181cad81158d71c41a2bce88edce078458e2dd5ffee7eddd6b05da85079f43", size = 205880 },
+ { url = "https://files.pythonhosted.org/packages/d1/e5/9076a0bbbfb65d1198007059c65639dfd56266cf8e477a9707e4b1999ff4/propcache-0.3.2-cp313-cp313-win32.whl", hash = "sha256:8a08154613f2249519e549de2330cf8e2071c2887309a7b07fb56098f5170a02", size = 37220 },
+ { url = "https://files.pythonhosted.org/packages/d3/f5/b369e026b09a26cd77aa88d8fffd69141d2ae00a2abaaf5380d2603f4b7f/propcache-0.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:e41671f1594fc4ab0a6dec1351864713cb3a279910ae8b58f884a88a0a632c05", size = 40678 },
+ { url = "https://files.pythonhosted.org/packages/a4/3a/6ece377b55544941a08d03581c7bc400a3c8cd3c2865900a68d5de79e21f/propcache-0.3.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:9a3cf035bbaf035f109987d9d55dc90e4b0e36e04bbbb95af3055ef17194057b", size = 76560 },
+ { url = "https://files.pythonhosted.org/packages/0c/da/64a2bb16418740fa634b0e9c3d29edff1db07f56d3546ca2d86ddf0305e1/propcache-0.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:156c03d07dc1323d8dacaa221fbe028c5c70d16709cdd63502778e6c3ccca1b0", size = 44676 },
+ { url = "https://files.pythonhosted.org/packages/36/7b/f025e06ea51cb72c52fb87e9b395cced02786610b60a3ed51da8af017170/propcache-0.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74413c0ba02ba86f55cf60d18daab219f7e531620c15f1e23d95563f505efe7e", size = 44701 },
+ { url = "https://files.pythonhosted.org/packages/a4/00/faa1b1b7c3b74fc277f8642f32a4c72ba1d7b2de36d7cdfb676db7f4303e/propcache-0.3.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f066b437bb3fa39c58ff97ab2ca351db465157d68ed0440abecb21715eb24b28", size = 276934 },
+ { url = "https://files.pythonhosted.org/packages/74/ab/935beb6f1756e0476a4d5938ff44bf0d13a055fed880caf93859b4f1baf4/propcache-0.3.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1304b085c83067914721e7e9d9917d41ad87696bf70f0bc7dee450e9c71ad0a", size = 278316 },
+ { url = "https://files.pythonhosted.org/packages/f8/9d/994a5c1ce4389610838d1caec74bdf0e98b306c70314d46dbe4fcf21a3e2/propcache-0.3.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab50cef01b372763a13333b4e54021bdcb291fc9a8e2ccb9c2df98be51bcde6c", size = 282619 },
+ { url = "https://files.pythonhosted.org/packages/2b/00/a10afce3d1ed0287cef2e09506d3be9822513f2c1e96457ee369adb9a6cd/propcache-0.3.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fad3b2a085ec259ad2c2842666b2a0a49dea8463579c606426128925af1ed725", size = 265896 },
+ { url = "https://files.pythonhosted.org/packages/2e/a8/2aa6716ffa566ca57c749edb909ad27884680887d68517e4be41b02299f3/propcache-0.3.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:261fa020c1c14deafd54c76b014956e2f86991af198c51139faf41c4d5e83892", size = 252111 },
+ { url = "https://files.pythonhosted.org/packages/36/4f/345ca9183b85ac29c8694b0941f7484bf419c7f0fea2d1e386b4f7893eed/propcache-0.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:46d7f8aa79c927e5f987ee3a80205c987717d3659f035c85cf0c3680526bdb44", size = 268334 },
+ { url = "https://files.pythonhosted.org/packages/3e/ca/fcd54f78b59e3f97b3b9715501e3147f5340167733d27db423aa321e7148/propcache-0.3.2-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:6d8f3f0eebf73e3c0ff0e7853f68be638b4043c65a70517bb575eff54edd8dbe", size = 255026 },
+ { url = "https://files.pythonhosted.org/packages/8b/95/8e6a6bbbd78ac89c30c225210a5c687790e532ba4088afb8c0445b77ef37/propcache-0.3.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:03c89c1b14a5452cf15403e291c0ccd7751d5b9736ecb2c5bab977ad6c5bcd81", size = 250724 },
+ { url = "https://files.pythonhosted.org/packages/ee/b0/0dd03616142baba28e8b2d14ce5df6631b4673850a3d4f9c0f9dd714a404/propcache-0.3.2-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:0cc17efde71e12bbaad086d679ce575268d70bc123a5a71ea7ad76f70ba30bba", size = 268868 },
+ { url = "https://files.pythonhosted.org/packages/c5/98/2c12407a7e4fbacd94ddd32f3b1e3d5231e77c30ef7162b12a60e2dd5ce3/propcache-0.3.2-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:acdf05d00696bc0447e278bb53cb04ca72354e562cf88ea6f9107df8e7fd9770", size = 271322 },
+ { url = "https://files.pythonhosted.org/packages/35/91/9cb56efbb428b006bb85db28591e40b7736847b8331d43fe335acf95f6c8/propcache-0.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4445542398bd0b5d32df908031cb1b30d43ac848e20470a878b770ec2dcc6330", size = 265778 },
+ { url = "https://files.pythonhosted.org/packages/9a/4c/b0fe775a2bdd01e176b14b574be679d84fc83958335790f7c9a686c1f468/propcache-0.3.2-cp313-cp313t-win32.whl", hash = "sha256:f86e5d7cd03afb3a1db8e9f9f6eff15794e79e791350ac48a8c924e6f439f394", size = 41175 },
+ { url = "https://files.pythonhosted.org/packages/a4/ff/47f08595e3d9b5e149c150f88d9714574f1a7cbd89fe2817158a952674bf/propcache-0.3.2-cp313-cp313t-win_amd64.whl", hash = "sha256:9704bedf6e7cbe3c65eca4379a9b53ee6a83749f047808cbb5044d40d7d72198", size = 44857 },
+ { url = "https://files.pythonhosted.org/packages/cc/35/cc0aaecf278bb4575b8555f2b137de5ab821595ddae9da9d3cd1da4072c7/propcache-0.3.2-py3-none-any.whl", hash = "sha256:98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f", size = 12663 },
+]
+
+[[package]]
+name = "protobuf"
+version = "5.29.5"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/43/29/d09e70352e4e88c9c7a198d5645d7277811448d76c23b00345670f7c8a38/protobuf-5.29.5.tar.gz", hash = "sha256:bc1463bafd4b0929216c35f437a8e28731a2b7fe3d98bb77a600efced5a15c84", size = 425226 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5f/11/6e40e9fc5bba02988a214c07cf324595789ca7820160bfd1f8be96e48539/protobuf-5.29.5-cp310-abi3-win32.whl", hash = "sha256:3f1c6468a2cfd102ff4703976138844f78ebd1fb45f49011afc5139e9e283079", size = 422963 },
+ { url = "https://files.pythonhosted.org/packages/81/7f/73cefb093e1a2a7c3ffd839e6f9fcafb7a427d300c7f8aef9c64405d8ac6/protobuf-5.29.5-cp310-abi3-win_amd64.whl", hash = "sha256:3f76e3a3675b4a4d867b52e4a5f5b78a2ef9565549d4037e06cf7b0942b1d3fc", size = 434818 },
+ { url = "https://files.pythonhosted.org/packages/dd/73/10e1661c21f139f2c6ad9b23040ff36fee624310dc28fba20d33fdae124c/protobuf-5.29.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e38c5add5a311f2a6eb0340716ef9b039c1dfa428b28f25a7838ac329204a671", size = 418091 },
+ { url = "https://files.pythonhosted.org/packages/6c/04/98f6f8cf5b07ab1294c13f34b4e69b3722bb609c5b701d6c169828f9f8aa/protobuf-5.29.5-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:fa18533a299d7ab6c55a238bf8629311439995f2e7eca5caaff08663606e9015", size = 319824 },
+ { url = "https://files.pythonhosted.org/packages/85/e4/07c80521879c2d15f321465ac24c70efe2381378c00bf5e56a0f4fbac8cd/protobuf-5.29.5-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:63848923da3325e1bf7e9003d680ce6e14b07e55d0473253a690c3a8b8fd6e61", size = 319942 },
+ { url = "https://files.pythonhosted.org/packages/7e/cc/7e77861000a0691aeea8f4566e5d3aa716f2b1dece4a24439437e41d3d25/protobuf-5.29.5-py3-none-any.whl", hash = "sha256:6cf42630262c59b2d8de33954443d94b746c952b01434fc58a417fdbd2e84bd5", size = 172823 },
+]
+
+[[package]]
+name = "psutil"
+version = "7.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/2a/80/336820c1ad9286a4ded7e845b2eccfcb27851ab8ac6abece774a6ff4d3de/psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456", size = 497003 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ed/e6/2d26234410f8b8abdbf891c9da62bee396583f713fb9f3325a4760875d22/psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25", size = 238051 },
+ { url = "https://files.pythonhosted.org/packages/04/8b/30f930733afe425e3cbfc0e1468a30a18942350c1a8816acfade80c005c4/psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da", size = 239535 },
+ { url = "https://files.pythonhosted.org/packages/2a/ed/d362e84620dd22876b55389248e522338ed1bf134a5edd3b8231d7207f6d/psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91", size = 275004 },
+ { url = "https://files.pythonhosted.org/packages/bf/b9/b0eb3f3cbcb734d930fdf839431606844a825b23eaf9a6ab371edac8162c/psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34", size = 277986 },
+ { url = "https://files.pythonhosted.org/packages/eb/a2/709e0fe2f093556c17fbafda93ac032257242cabcc7ff3369e2cb76a97aa/psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993", size = 279544 },
+ { url = "https://files.pythonhosted.org/packages/50/e6/eecf58810b9d12e6427369784efe814a1eec0f492084ce8eb8f4d89d6d61/psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99", size = 241053 },
+ { url = "https://files.pythonhosted.org/packages/50/1b/6921afe68c74868b4c9fa424dad3be35b095e16687989ebbb50ce4fceb7c/psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553", size = 244885 },
+]
+
+[[package]]
+name = "ptyprocess"
+version = "0.7.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/20/e5/16ff212c1e452235a90aeb09066144d0c5a6a8c0834397e03f5224495c4e/ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220", size = 70762 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/22/a6/858897256d0deac81a172289110f31629fc4cee19b6f01283303e18c8db3/ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35", size = 13993 },
+]
+
+[[package]]
+name = "pure-eval"
+version = "0.2.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/cd/05/0a34433a064256a578f1783a10da6df098ceaa4a57bbeaa96a6c0352786b/pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42", size = 19752 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/8e/37/efad0257dc6e593a18957422533ff0f87ede7c9c6ea010a2177d738fb82f/pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0", size = 11842 },
+]
+
+[[package]]
+name = "pyasn1"
+version = "0.6.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ba/e9/01f1a64245b89f039897cb0130016d79f77d52669aae6ee7b159a6c4c018/pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034", size = 145322 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c8/f1/d6a797abb14f6283c0ddff96bbdd46937f64122b8c925cab503dd37f8214/pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629", size = 83135 },
+]
+
+[[package]]
+name = "pyasn1-modules"
+version = "0.4.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pyasn1" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/e9/e6/78ebbb10a8c8e4b61a59249394a4a594c1a7af95593dc933a349c8d00964/pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6", size = 307892 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/47/8d/d529b5d697919ba8c11ad626e835d4039be708a35b0d22de83a269a6682c/pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a", size = 181259 },
+]
+
+[[package]]
+name = "pycparser"
+version = "2.22"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552 },
+]
+
+[[package]]
+name = "pydantic"
+version = "2.11.7"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "annotated-types" },
+ { name = "pydantic-core" },
+ { name = "typing-extensions" },
+ { name = "typing-inspection" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/00/dd/4325abf92c39ba8623b5af936ddb36ffcfe0beae70405d456ab1fb2f5b8c/pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db", size = 788350 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b", size = 444782 },
+]
+
+[[package]]
+name = "pydantic-core"
+version = "2.33.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000 },
+ { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996 },
+ { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957 },
+ { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199 },
+ { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296 },
+ { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109 },
+ { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028 },
+ { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044 },
+ { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881 },
+ { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034 },
+ { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187 },
+ { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628 },
+ { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866 },
+ { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894 },
+ { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688 },
+ { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808 },
+ { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580 },
+ { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859 },
+ { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810 },
+ { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498 },
+ { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611 },
+ { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924 },
+ { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196 },
+ { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389 },
+ { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223 },
+ { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473 },
+ { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269 },
+ { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921 },
+ { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162 },
+ { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560 },
+ { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777 },
+]
+
+[[package]]
+name = "pydantic-settings"
+version = "2.10.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pydantic" },
+ { name = "python-dotenv" },
+ { name = "typing-inspection" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/68/85/1ea668bbab3c50071ca613c6ab30047fb36ab0da1b92fa8f17bbc38fd36c/pydantic_settings-2.10.1.tar.gz", hash = "sha256:06f0062169818d0f5524420a360d632d5857b83cffd4d42fe29597807a1614ee", size = 172583 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/58/f0/427018098906416f580e3cf1366d3b1abfb408a0652e9f31600c24a1903c/pydantic_settings-2.10.1-py3-none-any.whl", hash = "sha256:a60952460b99cf661dc25c29c0ef171721f98bfcb52ef8d9ea4c943d7c8cc796", size = 45235 },
+]
+
+[[package]]
+name = "pygments"
+version = "2.19.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217 },
+]
+
+[[package]]
+name = "pyjwt"
+version = "2.9.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/fb/68/ce067f09fca4abeca8771fe667d89cc347d1e99da3e093112ac329c6020e/pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c", size = 78825 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/79/84/0fdf9b18ba31d69877bd39c9cd6052b47f3761e9910c15de788e519f079f/PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850", size = 22344 },
+]
+
+[[package]]
+name = "pytest"
+version = "8.4.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "colorama", marker = "sys_platform == 'win32'" },
+ { name = "iniconfig" },
+ { name = "packaging" },
+ { name = "pluggy" },
+ { name = "pygments" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/08/ba/45911d754e8eba3d5a841a5ce61a65a685ff1798421ac054f85aa8747dfb/pytest-8.4.1.tar.gz", hash = "sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c", size = 1517714 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/29/16/c8a903f4c4dffe7a12843191437d7cd8e32751d5de349d45d3fe69544e87/pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7", size = 365474 },
+]
+
+[[package]]
+name = "pytest-asyncio"
+version = "1.1.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pytest" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/4e/51/f8794af39eeb870e87a8c8068642fc07bce0c854d6865d7dd0f2a9d338c2/pytest_asyncio-1.1.0.tar.gz", hash = "sha256:796aa822981e01b68c12e4827b8697108f7205020f24b5793b3c41555dab68ea", size = 46652 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c7/9d/bf86eddabf8c6c9cb1ea9a869d6873b46f105a5d292d3a6f7071f5b07935/pytest_asyncio-1.1.0-py3-none-any.whl", hash = "sha256:5fe2d69607b0bd75c656d1211f969cadba035030156745ee09e7d71740e58ecf", size = 15157 },
+]
+
+[[package]]
+name = "python-dateutil"
+version = "2.9.0.post0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "six" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 },
+]
+
+[[package]]
+name = "python-dotenv"
+version = "1.1.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556 },
+]
+
+[[package]]
+name = "python-multipart"
+version = "0.0.20"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546 },
+]
+
+[[package]]
+name = "python-on-whales"
+version = "0.73.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pydantic" },
+ { name = "requests" },
+ { name = "tqdm" },
+ { name = "typer" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/40/c3/f57dd3e7d20af8a0399bb87471eac4698e0686b04073eef4bc291204a709/python_on_whales-0.73.0.tar.gz", hash = "sha256:c76bf3633550e5c948fb4215918364f45efaddb2e09df5ddd169132f7ffdc249", size = 112019 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d4/e9/ea125eb8954f64e76485aec5c63ca6a5b977e0127a5f3896993f1692166e/python_on_whales-0.73.0-py3-none-any.whl", hash = "sha256:66f31749c2544a0aacb4e3ba03772c2e9227235ea1aecd58aa7a4cdcf26f559a", size = 118125 },
+]
+
+[[package]]
+name = "pywin32"
+version = "311"
+source = { registry = "https://pypi.org/simple" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543 },
+ { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040 },
+ { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102 },
+ { url = "https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700 },
+ { url = "https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 9494700 },
+ { url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318 },
+ { url = "https://files.pythonhosted.org/packages/c9/31/097f2e132c4f16d99a22bfb777e0fd88bd8e1c634304e102f313af69ace5/pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee", size = 8840714 },
+ { url = "https://files.pythonhosted.org/packages/90/4b/07c77d8ba0e01349358082713400435347df8426208171ce297da32c313d/pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87", size = 9656800 },
+ { url = "https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540 },
+]
+
+[[package]]
+name = "pyyaml"
+version = "6.0.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873 },
+ { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302 },
+ { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154 },
+ { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223 },
+ { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542 },
+ { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164 },
+ { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611 },
+ { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591 },
+ { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338 },
+ { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309 },
+ { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679 },
+ { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428 },
+ { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361 },
+ { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523 },
+ { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660 },
+ { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597 },
+ { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527 },
+ { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446 },
+]
+
+[[package]]
+name = "pyzmq"
+version = "27.0.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "cffi", marker = "implementation_name == 'pypy'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/f1/06/50a4e9648b3e8b992bef8eb632e457307553a89d294103213cfd47b3da69/pyzmq-27.0.0.tar.gz", hash = "sha256:b1f08eeb9ce1510e6939b6e5dcd46a17765e2333daae78ecf4606808442e52cf", size = 280478 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/93/a7/9ad68f55b8834ede477842214feba6a4c786d936c022a67625497aacf61d/pyzmq-27.0.0-cp312-abi3-macosx_10_15_universal2.whl", hash = "sha256:cbabc59dcfaac66655c040dfcb8118f133fb5dde185e5fc152628354c1598e52", size = 1305438 },
+ { url = "https://files.pythonhosted.org/packages/ba/ee/26aa0f98665a22bc90ebe12dced1de5f3eaca05363b717f6fb229b3421b3/pyzmq-27.0.0-cp312-abi3-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:cb0ac5179cba4b2f94f1aa208fbb77b62c4c9bf24dd446278b8b602cf85fcda3", size = 895095 },
+ { url = "https://files.pythonhosted.org/packages/cf/85/c57e7ab216ecd8aa4cc7e3b83b06cc4e9cf45c87b0afc095f10cd5ce87c1/pyzmq-27.0.0-cp312-abi3-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53a48f0228eab6cbf69fde3aa3c03cbe04e50e623ef92ae395fce47ef8a76152", size = 651826 },
+ { url = "https://files.pythonhosted.org/packages/69/9a/9ea7e230feda9400fb0ae0d61d7d6ddda635e718d941c44eeab22a179d34/pyzmq-27.0.0-cp312-abi3-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:111db5f395e09f7e775f759d598f43cb815fc58e0147623c4816486e1a39dc22", size = 839750 },
+ { url = "https://files.pythonhosted.org/packages/08/66/4cebfbe71f3dfbd417011daca267539f62ed0fbc68105357b68bbb1a25b7/pyzmq-27.0.0-cp312-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c8878011653dcdc27cc2c57e04ff96f0471e797f5c19ac3d7813a245bcb24371", size = 1641357 },
+ { url = "https://files.pythonhosted.org/packages/ac/f6/b0f62578c08d2471c791287149cb8c2aaea414ae98c6e995c7dbe008adfb/pyzmq-27.0.0-cp312-abi3-musllinux_1_2_i686.whl", hash = "sha256:c0ed2c1f335ba55b5fdc964622254917d6b782311c50e138863eda409fbb3b6d", size = 2020281 },
+ { url = "https://files.pythonhosted.org/packages/37/b9/4f670b15c7498495da9159edc374ec09c88a86d9cd5a47d892f69df23450/pyzmq-27.0.0-cp312-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:e918d70862d4cfd4b1c187310015646a14e1f5917922ab45b29f28f345eeb6be", size = 1877110 },
+ { url = "https://files.pythonhosted.org/packages/66/31/9dee25c226295b740609f0d46db2fe972b23b6f5cf786360980524a3ba92/pyzmq-27.0.0-cp312-abi3-win32.whl", hash = "sha256:88b4e43cab04c3c0f0d55df3b1eef62df2b629a1a369b5289a58f6fa8b07c4f4", size = 559297 },
+ { url = "https://files.pythonhosted.org/packages/9b/12/52da5509800f7ff2d287b2f2b4e636e7ea0f001181cba6964ff6c1537778/pyzmq-27.0.0-cp312-abi3-win_amd64.whl", hash = "sha256:dce4199bf5f648a902ce37e7b3afa286f305cd2ef7a8b6ec907470ccb6c8b371", size = 619203 },
+ { url = "https://files.pythonhosted.org/packages/93/6d/7f2e53b19d1edb1eb4f09ec7c3a1f945ca0aac272099eab757d15699202b/pyzmq-27.0.0-cp312-abi3-win_arm64.whl", hash = "sha256:56e46bbb85d52c1072b3f809cc1ce77251d560bc036d3a312b96db1afe76db2e", size = 551927 },
+ { url = "https://files.pythonhosted.org/packages/19/62/876b27c4ff777db4ceba1c69ea90d3c825bb4f8d5e7cd987ce5802e33c55/pyzmq-27.0.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:c36ad534c0c29b4afa088dc53543c525b23c0797e01b69fef59b1a9c0e38b688", size = 1340826 },
+ { url = "https://files.pythonhosted.org/packages/43/69/58ef8f4f59d3bcd505260c73bee87b008850f45edca40ddaba54273c35f4/pyzmq-27.0.0-cp313-cp313t-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:67855c14173aec36395d7777aaba3cc527b393821f30143fd20b98e1ff31fd38", size = 897283 },
+ { url = "https://files.pythonhosted.org/packages/43/15/93a0d0396700a60475ad3c5d42c5f1c308d3570bc94626b86c71ef9953e0/pyzmq-27.0.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8617c7d43cd8ccdb62aebe984bfed77ca8f036e6c3e46dd3dddda64b10f0ab7a", size = 660567 },
+ { url = "https://files.pythonhosted.org/packages/0e/b3/fe055513e498ca32f64509abae19b9c9eb4d7c829e02bd8997dd51b029eb/pyzmq-27.0.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:67bfbcbd0a04c575e8103a6061d03e393d9f80ffdb9beb3189261e9e9bc5d5e9", size = 847681 },
+ { url = "https://files.pythonhosted.org/packages/b6/4f/ff15300b00b5b602191f3df06bbc8dd4164e805fdd65bb77ffbb9c5facdc/pyzmq-27.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5cd11d46d7b7e5958121b3eaf4cd8638eff3a720ec527692132f05a57f14341d", size = 1650148 },
+ { url = "https://files.pythonhosted.org/packages/c4/6f/84bdfff2a224a6f26a24249a342e5906993c50b0761e311e81b39aef52a7/pyzmq-27.0.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:b801c2e40c5aa6072c2f4876de8dccd100af6d9918d4d0d7aa54a1d982fd4f44", size = 2023768 },
+ { url = "https://files.pythonhosted.org/packages/64/39/dc2db178c26a42228c5ac94a9cc595030458aa64c8d796a7727947afbf55/pyzmq-27.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:20d5cb29e8c5f76a127c75b6e7a77e846bc4b655c373baa098c26a61b7ecd0ef", size = 1885199 },
+ { url = "https://files.pythonhosted.org/packages/c7/21/dae7b06a1f8cdee5d8e7a63d99c5d129c401acc40410bef2cbf42025e26f/pyzmq-27.0.0-cp313-cp313t-win32.whl", hash = "sha256:a20528da85c7ac7a19b7384e8c3f8fa707841fd85afc4ed56eda59d93e3d98ad", size = 575439 },
+ { url = "https://files.pythonhosted.org/packages/eb/bc/1709dc55f0970cf4cb8259e435e6773f9946f41a045c2cb90e870b7072da/pyzmq-27.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:d8229f2efece6a660ee211d74d91dbc2a76b95544d46c74c615e491900dc107f", size = 639933 },
+]
+
+[[package]]
+name = "questionary"
+version = "2.1.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "prompt-toolkit" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a8/b8/d16eb579277f3de9e56e5ad25280fab52fc5774117fb70362e8c2e016559/questionary-2.1.0.tar.gz", hash = "sha256:6302cdd645b19667d8f6e6634774e9538bfcd1aad9be287e743d96cacaf95587", size = 26775 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ad/3f/11dd4cd4f39e05128bfd20138faea57bec56f9ffba6185d276e3107ba5b2/questionary-2.1.0-py3-none-any.whl", hash = "sha256:44174d237b68bc828e4878c763a9ad6790ee61990e0ae72927694ead57bab8ec", size = 36747 },
+]
+
+[[package]]
+name = "redis"
+version = "5.3.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pyjwt" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/71/dd/2b37032f4119dff2a2f9bbcaade03221b100ba26051bb96e275de3e5db7a/redis-5.3.0.tar.gz", hash = "sha256:8d69d2dde11a12dc85d0dbf5c45577a5af048e2456f7077d87ad35c1c81c310e", size = 4626288 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/45/b0/aa601efe12180ba492b02e270554877e68467e66bda5d73e51eaa8ecc78a/redis-5.3.0-py3-none-any.whl", hash = "sha256:f1deeca1ea2ef25c1e4e46b07f4ea1275140526b1feea4c6459c0ec27a10ef83", size = 272836 },
+]
+
+[[package]]
+name = "referencing"
+version = "0.36.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "attrs" },
+ { name = "rpds-py" },
+ { name = "typing-extensions", marker = "python_full_version < '3.13'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/2f/db/98b5c277be99dd18bfd91dd04e1b759cad18d1a338188c936e92f921c7e2/referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa", size = 74744 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c1/b1/3baf80dc6d2b7bc27a95a67752d0208e410351e3feb4eb78de5f77454d8d/referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0", size = 26775 },
+]
+
+[[package]]
+name = "regex"
+version = "2024.11.6"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/8e/5f/bd69653fbfb76cf8604468d3b4ec4c403197144c7bfe0e6a5fc9e02a07cb/regex-2024.11.6.tar.gz", hash = "sha256:7ab159b063c52a0333c884e4679f8d7a85112ee3078fe3d9004b2dd875585519", size = 399494 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ba/30/9a87ce8336b172cc232a0db89a3af97929d06c11ceaa19d97d84fa90a8f8/regex-2024.11.6-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:52fb28f528778f184f870b7cf8f225f5eef0a8f6e3778529bdd40c7b3920796a", size = 483781 },
+ { url = "https://files.pythonhosted.org/packages/01/e8/00008ad4ff4be8b1844786ba6636035f7ef926db5686e4c0f98093612add/regex-2024.11.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdd6028445d2460f33136c55eeb1f601ab06d74cb3347132e1c24250187500d9", size = 288455 },
+ { url = "https://files.pythonhosted.org/packages/60/85/cebcc0aff603ea0a201667b203f13ba75d9fc8668fab917ac5b2de3967bc/regex-2024.11.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:805e6b60c54bf766b251e94526ebad60b7de0c70f70a4e6210ee2891acb70bf2", size = 284759 },
+ { url = "https://files.pythonhosted.org/packages/94/2b/701a4b0585cb05472a4da28ee28fdfe155f3638f5e1ec92306d924e5faf0/regex-2024.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b85c2530be953a890eaffde05485238f07029600e8f098cdf1848d414a8b45e4", size = 794976 },
+ { url = "https://files.pythonhosted.org/packages/4b/bf/fa87e563bf5fee75db8915f7352e1887b1249126a1be4813837f5dbec965/regex-2024.11.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb26437975da7dc36b7efad18aa9dd4ea569d2357ae6b783bf1118dabd9ea577", size = 833077 },
+ { url = "https://files.pythonhosted.org/packages/a1/56/7295e6bad94b047f4d0834e4779491b81216583c00c288252ef625c01d23/regex-2024.11.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abfa5080c374a76a251ba60683242bc17eeb2c9818d0d30117b4486be10c59d3", size = 823160 },
+ { url = "https://files.pythonhosted.org/packages/fb/13/e3b075031a738c9598c51cfbc4c7879e26729c53aa9cca59211c44235314/regex-2024.11.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b7fa6606c2881c1db9479b0eaa11ed5dfa11c8d60a474ff0e095099f39d98e", size = 796896 },
+ { url = "https://files.pythonhosted.org/packages/24/56/0b3f1b66d592be6efec23a795b37732682520b47c53da5a32c33ed7d84e3/regex-2024.11.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c32f75920cf99fe6b6c539c399a4a128452eaf1af27f39bce8909c9a3fd8cbe", size = 783997 },
+ { url = "https://files.pythonhosted.org/packages/f9/a1/eb378dada8b91c0e4c5f08ffb56f25fcae47bf52ad18f9b2f33b83e6d498/regex-2024.11.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:982e6d21414e78e1f51cf595d7f321dcd14de1f2881c5dc6a6e23bbbbd68435e", size = 781725 },
+ { url = "https://files.pythonhosted.org/packages/83/f2/033e7dec0cfd6dda93390089864732a3409246ffe8b042e9554afa9bff4e/regex-2024.11.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a7c2155f790e2fb448faed6dd241386719802296ec588a8b9051c1f5c481bc29", size = 789481 },
+ { url = "https://files.pythonhosted.org/packages/83/23/15d4552ea28990a74e7696780c438aadd73a20318c47e527b47a4a5a596d/regex-2024.11.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149f5008d286636e48cd0b1dd65018548944e495b0265b45e1bffecce1ef7f39", size = 852896 },
+ { url = "https://files.pythonhosted.org/packages/e3/39/ed4416bc90deedbfdada2568b2cb0bc1fdb98efe11f5378d9892b2a88f8f/regex-2024.11.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e5364a4502efca094731680e80009632ad6624084aff9a23ce8c8c6820de3e51", size = 860138 },
+ { url = "https://files.pythonhosted.org/packages/93/2d/dd56bb76bd8e95bbce684326302f287455b56242a4f9c61f1bc76e28360e/regex-2024.11.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0a86e7eeca091c09e021db8eb72d54751e527fa47b8d5787caf96d9831bd02ad", size = 787692 },
+ { url = "https://files.pythonhosted.org/packages/0b/55/31877a249ab7a5156758246b9c59539abbeba22461b7d8adc9e8475ff73e/regex-2024.11.6-cp312-cp312-win32.whl", hash = "sha256:32f9a4c643baad4efa81d549c2aadefaeba12249b2adc5af541759237eee1c54", size = 262135 },
+ { url = "https://files.pythonhosted.org/packages/38/ec/ad2d7de49a600cdb8dd78434a1aeffe28b9d6fc42eb36afab4a27ad23384/regex-2024.11.6-cp312-cp312-win_amd64.whl", hash = "sha256:a93c194e2df18f7d264092dc8539b8ffb86b45b899ab976aa15d48214138e81b", size = 273567 },
+ { url = "https://files.pythonhosted.org/packages/90/73/bcb0e36614601016552fa9344544a3a2ae1809dc1401b100eab02e772e1f/regex-2024.11.6-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a6ba92c0bcdf96cbf43a12c717eae4bc98325ca3730f6b130ffa2e3c3c723d84", size = 483525 },
+ { url = "https://files.pythonhosted.org/packages/0f/3f/f1a082a46b31e25291d830b369b6b0c5576a6f7fb89d3053a354c24b8a83/regex-2024.11.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:525eab0b789891ac3be914d36893bdf972d483fe66551f79d3e27146191a37d4", size = 288324 },
+ { url = "https://files.pythonhosted.org/packages/09/c9/4e68181a4a652fb3ef5099e077faf4fd2a694ea6e0f806a7737aff9e758a/regex-2024.11.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:086a27a0b4ca227941700e0b31425e7a28ef1ae8e5e05a33826e17e47fbfdba0", size = 284617 },
+ { url = "https://files.pythonhosted.org/packages/fc/fd/37868b75eaf63843165f1d2122ca6cb94bfc0271e4428cf58c0616786dce/regex-2024.11.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bde01f35767c4a7899b7eb6e823b125a64de314a8ee9791367c9a34d56af18d0", size = 795023 },
+ { url = "https://files.pythonhosted.org/packages/c4/7c/d4cd9c528502a3dedb5c13c146e7a7a539a3853dc20209c8e75d9ba9d1b2/regex-2024.11.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b583904576650166b3d920d2bcce13971f6f9e9a396c673187f49811b2769dc7", size = 833072 },
+ { url = "https://files.pythonhosted.org/packages/4f/db/46f563a08f969159c5a0f0e722260568425363bea43bb7ae370becb66a67/regex-2024.11.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c4de13f06a0d54fa0d5ab1b7138bfa0d883220965a29616e3ea61b35d5f5fc7", size = 823130 },
+ { url = "https://files.pythonhosted.org/packages/db/60/1eeca2074f5b87df394fccaa432ae3fc06c9c9bfa97c5051aed70e6e00c2/regex-2024.11.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cde6e9f2580eb1665965ce9bf17ff4952f34f5b126beb509fee8f4e994f143c", size = 796857 },
+ { url = "https://files.pythonhosted.org/packages/10/db/ac718a08fcee981554d2f7bb8402f1faa7e868c1345c16ab1ebec54b0d7b/regex-2024.11.6-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d7f453dca13f40a02b79636a339c5b62b670141e63efd511d3f8f73fba162b3", size = 784006 },
+ { url = "https://files.pythonhosted.org/packages/c2/41/7da3fe70216cea93144bf12da2b87367590bcf07db97604edeea55dac9ad/regex-2024.11.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59dfe1ed21aea057a65c6b586afd2a945de04fc7db3de0a6e3ed5397ad491b07", size = 781650 },
+ { url = "https://files.pythonhosted.org/packages/a7/d5/880921ee4eec393a4752e6ab9f0fe28009435417c3102fc413f3fe81c4e5/regex-2024.11.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b97c1e0bd37c5cd7902e65f410779d39eeda155800b65fc4d04cc432efa9bc6e", size = 789545 },
+ { url = "https://files.pythonhosted.org/packages/dc/96/53770115e507081122beca8899ab7f5ae28ae790bfcc82b5e38976df6a77/regex-2024.11.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f9d1e379028e0fc2ae3654bac3cbbef81bf3fd571272a42d56c24007979bafb6", size = 853045 },
+ { url = "https://files.pythonhosted.org/packages/31/d3/1372add5251cc2d44b451bd94f43b2ec78e15a6e82bff6a290ef9fd8f00a/regex-2024.11.6-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:13291b39131e2d002a7940fb176e120bec5145f3aeb7621be6534e46251912c4", size = 860182 },
+ { url = "https://files.pythonhosted.org/packages/ed/e3/c446a64984ea9f69982ba1a69d4658d5014bc7a0ea468a07e1a1265db6e2/regex-2024.11.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f51f88c126370dcec4908576c5a627220da6c09d0bff31cfa89f2523843316d", size = 787733 },
+ { url = "https://files.pythonhosted.org/packages/2b/f1/e40c8373e3480e4f29f2692bd21b3e05f296d3afebc7e5dcf21b9756ca1c/regex-2024.11.6-cp313-cp313-win32.whl", hash = "sha256:63b13cfd72e9601125027202cad74995ab26921d8cd935c25f09c630436348ff", size = 262122 },
+ { url = "https://files.pythonhosted.org/packages/45/94/bc295babb3062a731f52621cdc992d123111282e291abaf23faa413443ea/regex-2024.11.6-cp313-cp313-win_amd64.whl", hash = "sha256:2b3361af3198667e99927da8b84c1b010752fa4b1115ee30beaa332cabc3ef1a", size = 273545 },
+]
+
+[[package]]
+name = "requests"
+version = "2.32.4"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "certifi" },
+ { name = "charset-normalizer" },
+ { name = "idna" },
+ { name = "urllib3" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/e1/0a/929373653770d8a0d7ea76c37de6e41f11eb07559b103b1c02cafb3f7cf8/requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422", size = 135258 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7c/e4/56027c4a6b4ae70ca9de302488c5ca95ad4a39e190093d6c1a8ace08341b/requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c", size = 64847 },
+]
+
+[[package]]
+name = "requests-oauthlib"
+version = "2.0.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "oauthlib" },
+ { name = "requests" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/42/f2/05f29bc3913aea15eb670be136045bf5c5bbf4b99ecb839da9b422bb2c85/requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9", size = 55650 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/3b/5d/63d4ae3b9daea098d5d6f5da83984853c1bbacd5dc826764b249fe119d24/requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36", size = 24179 },
+]
+
+[[package]]
+name = "rich"
+version = "13.9.4"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "markdown-it-py" },
+ { name = "pygments" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ab/3a/0316b28d0761c6734d6bc14e770d85506c986c85ffb239e688eeaab2c2bc/rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098", size = 223149 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/19/71/39c7c0d87f8d4e6c020a393182060eaefeeae6c01dab6a84ec346f2567df/rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90", size = 242424 },
+]
+
+[[package]]
+name = "rpds-py"
+version = "0.26.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a5/aa/4456d84bbb54adc6a916fb10c9b374f78ac840337644e4a5eda229c81275/rpds_py-0.26.0.tar.gz", hash = "sha256:20dae58a859b0906f0685642e591056f1e787f3a8b39c8e8749a45dc7d26bdb0", size = 27385 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ea/86/90eb87c6f87085868bd077c7a9938006eb1ce19ed4d06944a90d3560fce2/rpds_py-0.26.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:894514d47e012e794f1350f076c427d2347ebf82f9b958d554d12819849a369d", size = 363933 },
+ { url = "https://files.pythonhosted.org/packages/63/78/4469f24d34636242c924626082b9586f064ada0b5dbb1e9d096ee7a8e0c6/rpds_py-0.26.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc921b96fa95a097add244da36a1d9e4f3039160d1d30f1b35837bf108c21136", size = 350447 },
+ { url = "https://files.pythonhosted.org/packages/ad/91/c448ed45efdfdade82348d5e7995e15612754826ea640afc20915119734f/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e1157659470aa42a75448b6e943c895be8c70531c43cb78b9ba990778955582", size = 384711 },
+ { url = "https://files.pythonhosted.org/packages/ec/43/e5c86fef4be7f49828bdd4ecc8931f0287b1152c0bb0163049b3218740e7/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:521ccf56f45bb3a791182dc6b88ae5f8fa079dd705ee42138c76deb1238e554e", size = 400865 },
+ { url = "https://files.pythonhosted.org/packages/55/34/e00f726a4d44f22d5c5fe2e5ddd3ac3d7fd3f74a175607781fbdd06fe375/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9def736773fd56b305c0eef698be5192c77bfa30d55a0e5885f80126c4831a15", size = 517763 },
+ { url = "https://files.pythonhosted.org/packages/52/1c/52dc20c31b147af724b16104500fba13e60123ea0334beba7b40e33354b4/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cdad4ea3b4513b475e027be79e5a0ceac8ee1c113a1a11e5edc3c30c29f964d8", size = 406651 },
+ { url = "https://files.pythonhosted.org/packages/2e/77/87d7bfabfc4e821caa35481a2ff6ae0b73e6a391bb6b343db2c91c2b9844/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82b165b07f416bdccf5c84546a484cc8f15137ca38325403864bfdf2b5b72f6a", size = 386079 },
+ { url = "https://files.pythonhosted.org/packages/e3/d4/7f2200c2d3ee145b65b3cddc4310d51f7da6a26634f3ac87125fd789152a/rpds_py-0.26.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d04cab0a54b9dba4d278fe955a1390da3cf71f57feb78ddc7cb67cbe0bd30323", size = 421379 },
+ { url = "https://files.pythonhosted.org/packages/ae/13/9fdd428b9c820869924ab62236b8688b122baa22d23efdd1c566938a39ba/rpds_py-0.26.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:79061ba1a11b6a12743a2b0f72a46aa2758613d454aa6ba4f5a265cc48850158", size = 562033 },
+ { url = "https://files.pythonhosted.org/packages/f3/e1/b69686c3bcbe775abac3a4c1c30a164a2076d28df7926041f6c0eb5e8d28/rpds_py-0.26.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f405c93675d8d4c5ac87364bb38d06c988e11028a64b52a47158a355079661f3", size = 591639 },
+ { url = "https://files.pythonhosted.org/packages/5c/c9/1e3d8c8863c84a90197ac577bbc3d796a92502124c27092413426f670990/rpds_py-0.26.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dafd4c44b74aa4bed4b250f1aed165b8ef5de743bcca3b88fc9619b6087093d2", size = 557105 },
+ { url = "https://files.pythonhosted.org/packages/9f/c5/90c569649057622959f6dcc40f7b516539608a414dfd54b8d77e3b201ac0/rpds_py-0.26.0-cp312-cp312-win32.whl", hash = "sha256:3da5852aad63fa0c6f836f3359647870e21ea96cf433eb393ffa45263a170d44", size = 223272 },
+ { url = "https://files.pythonhosted.org/packages/7d/16/19f5d9f2a556cfed454eebe4d354c38d51c20f3db69e7b4ce6cff904905d/rpds_py-0.26.0-cp312-cp312-win_amd64.whl", hash = "sha256:cf47cfdabc2194a669dcf7a8dbba62e37a04c5041d2125fae0233b720da6f05c", size = 234995 },
+ { url = "https://files.pythonhosted.org/packages/83/f0/7935e40b529c0e752dfaa7880224771b51175fce08b41ab4a92eb2fbdc7f/rpds_py-0.26.0-cp312-cp312-win_arm64.whl", hash = "sha256:20ab1ae4fa534f73647aad289003f1104092890849e0266271351922ed5574f8", size = 223198 },
+ { url = "https://files.pythonhosted.org/packages/6a/67/bb62d0109493b12b1c6ab00de7a5566aa84c0e44217c2d94bee1bd370da9/rpds_py-0.26.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:696764a5be111b036256c0b18cd29783fab22154690fc698062fc1b0084b511d", size = 363917 },
+ { url = "https://files.pythonhosted.org/packages/4b/f3/34e6ae1925a5706c0f002a8d2d7f172373b855768149796af87bd65dcdb9/rpds_py-0.26.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1e6c15d2080a63aaed876e228efe4f814bc7889c63b1e112ad46fdc8b368b9e1", size = 350073 },
+ { url = "https://files.pythonhosted.org/packages/75/83/1953a9d4f4e4de7fd0533733e041c28135f3c21485faaef56a8aadbd96b5/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:390e3170babf42462739a93321e657444f0862c6d722a291accc46f9d21ed04e", size = 384214 },
+ { url = "https://files.pythonhosted.org/packages/48/0e/983ed1b792b3322ea1d065e67f4b230f3b96025f5ce3878cc40af09b7533/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7da84c2c74c0f5bc97d853d9e17bb83e2dcafcff0dc48286916001cc114379a1", size = 400113 },
+ { url = "https://files.pythonhosted.org/packages/69/7f/36c0925fff6f660a80be259c5b4f5e53a16851f946eb080351d057698528/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c5fe114a6dd480a510b6d3661d09d67d1622c4bf20660a474507aaee7eeeee9", size = 515189 },
+ { url = "https://files.pythonhosted.org/packages/13/45/cbf07fc03ba7a9b54662c9badb58294ecfb24f828b9732970bd1a431ed5c/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3100b3090269f3a7ea727b06a6080d4eb7439dca4c0e91a07c5d133bb1727ea7", size = 406998 },
+ { url = "https://files.pythonhosted.org/packages/6c/b0/8fa5e36e58657997873fd6a1cf621285ca822ca75b4b3434ead047daa307/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c03c9b0c64afd0320ae57de4c982801271c0c211aa2d37f3003ff5feb75bb04", size = 385903 },
+ { url = "https://files.pythonhosted.org/packages/4b/f7/b25437772f9f57d7a9fbd73ed86d0dcd76b4c7c6998348c070d90f23e315/rpds_py-0.26.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5963b72ccd199ade6ee493723d18a3f21ba7d5b957017607f815788cef50eaf1", size = 419785 },
+ { url = "https://files.pythonhosted.org/packages/a7/6b/63ffa55743dfcb4baf2e9e77a0b11f7f97ed96a54558fcb5717a4b2cd732/rpds_py-0.26.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9da4e873860ad5bab3291438525cae80169daecbfafe5657f7f5fb4d6b3f96b9", size = 561329 },
+ { url = "https://files.pythonhosted.org/packages/2f/07/1f4f5e2886c480a2346b1e6759c00278b8a69e697ae952d82ae2e6ee5db0/rpds_py-0.26.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5afaddaa8e8c7f1f7b4c5c725c0070b6eed0228f705b90a1732a48e84350f4e9", size = 590875 },
+ { url = "https://files.pythonhosted.org/packages/cc/bc/e6639f1b91c3a55f8c41b47d73e6307051b6e246254a827ede730624c0f8/rpds_py-0.26.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4916dc96489616a6f9667e7526af8fa693c0fdb4f3acb0e5d9f4400eb06a47ba", size = 556636 },
+ { url = "https://files.pythonhosted.org/packages/05/4c/b3917c45566f9f9a209d38d9b54a1833f2bb1032a3e04c66f75726f28876/rpds_py-0.26.0-cp313-cp313-win32.whl", hash = "sha256:2a343f91b17097c546b93f7999976fd6c9d5900617aa848c81d794e062ab302b", size = 222663 },
+ { url = "https://files.pythonhosted.org/packages/e0/0b/0851bdd6025775aaa2365bb8de0697ee2558184c800bfef8d7aef5ccde58/rpds_py-0.26.0-cp313-cp313-win_amd64.whl", hash = "sha256:0a0b60701f2300c81b2ac88a5fb893ccfa408e1c4a555a77f908a2596eb875a5", size = 234428 },
+ { url = "https://files.pythonhosted.org/packages/ed/e8/a47c64ed53149c75fb581e14a237b7b7cd18217e969c30d474d335105622/rpds_py-0.26.0-cp313-cp313-win_arm64.whl", hash = "sha256:257d011919f133a4746958257f2c75238e3ff54255acd5e3e11f3ff41fd14256", size = 222571 },
+ { url = "https://files.pythonhosted.org/packages/89/bf/3d970ba2e2bcd17d2912cb42874107390f72873e38e79267224110de5e61/rpds_py-0.26.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:529c8156d7506fba5740e05da8795688f87119cce330c244519cf706a4a3d618", size = 360475 },
+ { url = "https://files.pythonhosted.org/packages/82/9f/283e7e2979fc4ec2d8ecee506d5a3675fce5ed9b4b7cb387ea5d37c2f18d/rpds_py-0.26.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f53ec51f9d24e9638a40cabb95078ade8c99251945dad8d57bf4aabe86ecee35", size = 346692 },
+ { url = "https://files.pythonhosted.org/packages/e3/03/7e50423c04d78daf391da3cc4330bdb97042fc192a58b186f2d5deb7befd/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab504c4d654e4a29558eaa5bb8cea5fdc1703ea60a8099ffd9c758472cf913f", size = 379415 },
+ { url = "https://files.pythonhosted.org/packages/57/00/d11ee60d4d3b16808432417951c63df803afb0e0fc672b5e8d07e9edaaae/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fd0641abca296bc1a00183fe44f7fced8807ed49d501f188faa642d0e4975b83", size = 391783 },
+ { url = "https://files.pythonhosted.org/packages/08/b3/1069c394d9c0d6d23c5b522e1f6546b65793a22950f6e0210adcc6f97c3e/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:69b312fecc1d017b5327afa81d4da1480f51c68810963a7336d92203dbb3d4f1", size = 512844 },
+ { url = "https://files.pythonhosted.org/packages/08/3b/c4fbf0926800ed70b2c245ceca99c49f066456755f5d6eb8863c2c51e6d0/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c741107203954f6fc34d3066d213d0a0c40f7bb5aafd698fb39888af277c70d8", size = 402105 },
+ { url = "https://files.pythonhosted.org/packages/1c/b0/db69b52ca07413e568dae9dc674627a22297abb144c4d6022c6d78f1e5cc/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc3e55a7db08dc9a6ed5fb7103019d2c1a38a349ac41901f9f66d7f95750942f", size = 383440 },
+ { url = "https://files.pythonhosted.org/packages/4c/e1/c65255ad5b63903e56b3bb3ff9dcc3f4f5c3badde5d08c741ee03903e951/rpds_py-0.26.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9e851920caab2dbcae311fd28f4313c6953993893eb5c1bb367ec69d9a39e7ed", size = 412759 },
+ { url = "https://files.pythonhosted.org/packages/e4/22/bb731077872377a93c6e93b8a9487d0406c70208985831034ccdeed39c8e/rpds_py-0.26.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:dfbf280da5f876d0b00c81f26bedce274e72a678c28845453885a9b3c22ae632", size = 556032 },
+ { url = "https://files.pythonhosted.org/packages/e0/8b/393322ce7bac5c4530fb96fc79cc9ea2f83e968ff5f6e873f905c493e1c4/rpds_py-0.26.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:1cc81d14ddfa53d7f3906694d35d54d9d3f850ef8e4e99ee68bc0d1e5fed9a9c", size = 585416 },
+ { url = "https://files.pythonhosted.org/packages/49/ae/769dc372211835bf759319a7aae70525c6eb523e3371842c65b7ef41c9c6/rpds_py-0.26.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dca83c498b4650a91efcf7b88d669b170256bf8017a5db6f3e06c2bf031f57e0", size = 554049 },
+ { url = "https://files.pythonhosted.org/packages/6b/f9/4c43f9cc203d6ba44ce3146246cdc38619d92c7bd7bad4946a3491bd5b70/rpds_py-0.26.0-cp313-cp313t-win32.whl", hash = "sha256:4d11382bcaf12f80b51d790dee295c56a159633a8e81e6323b16e55d81ae37e9", size = 218428 },
+ { url = "https://files.pythonhosted.org/packages/7e/8b/9286b7e822036a4a977f2f1e851c7345c20528dbd56b687bb67ed68a8ede/rpds_py-0.26.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ff110acded3c22c033e637dd8896e411c7d3a11289b2edf041f86663dbc791e9", size = 231524 },
+ { url = "https://files.pythonhosted.org/packages/55/07/029b7c45db910c74e182de626dfdae0ad489a949d84a468465cd0ca36355/rpds_py-0.26.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:da619979df60a940cd434084355c514c25cf8eb4cf9a508510682f6c851a4f7a", size = 364292 },
+ { url = "https://files.pythonhosted.org/packages/13/d1/9b3d3f986216b4d1f584878dca15ce4797aaf5d372d738974ba737bf68d6/rpds_py-0.26.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ea89a2458a1a75f87caabefe789c87539ea4e43b40f18cff526052e35bbb4fdf", size = 350334 },
+ { url = "https://files.pythonhosted.org/packages/18/98/16d5e7bc9ec715fa9668731d0cf97f6b032724e61696e2db3d47aeb89214/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feac1045b3327a45944e7dcbeb57530339f6b17baff154df51ef8b0da34c8c12", size = 384875 },
+ { url = "https://files.pythonhosted.org/packages/f9/13/aa5e2b1ec5ab0e86a5c464d53514c0467bec6ba2507027d35fc81818358e/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b818a592bd69bfe437ee8368603d4a2d928c34cffcdf77c2e761a759ffd17d20", size = 399993 },
+ { url = "https://files.pythonhosted.org/packages/17/03/8021810b0e97923abdbab6474c8b77c69bcb4b2c58330777df9ff69dc559/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a8b0dd8648709b62d9372fc00a57466f5fdeefed666afe3fea5a6c9539a0331", size = 516683 },
+ { url = "https://files.pythonhosted.org/packages/dc/b1/da8e61c87c2f3d836954239fdbbfb477bb7b54d74974d8f6fcb34342d166/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6d3498ad0df07d81112aa6ec6c95a7e7b1ae00929fb73e7ebee0f3faaeabad2f", size = 408825 },
+ { url = "https://files.pythonhosted.org/packages/38/bc/1fc173edaaa0e52c94b02a655db20697cb5fa954ad5a8e15a2c784c5cbdd/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24a4146ccb15be237fdef10f331c568e1b0e505f8c8c9ed5d67759dac58ac246", size = 387292 },
+ { url = "https://files.pythonhosted.org/packages/7c/eb/3a9bb4bd90867d21916f253caf4f0d0be7098671b6715ad1cead9fe7bab9/rpds_py-0.26.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a9a63785467b2d73635957d32a4f6e73d5e4df497a16a6392fa066b753e87387", size = 420435 },
+ { url = "https://files.pythonhosted.org/packages/cd/16/e066dcdb56f5632713445271a3f8d3d0b426d51ae9c0cca387799df58b02/rpds_py-0.26.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:de4ed93a8c91debfd5a047be327b7cc8b0cc6afe32a716bbbc4aedca9e2a83af", size = 562410 },
+ { url = "https://files.pythonhosted.org/packages/60/22/ddbdec7eb82a0dc2e455be44c97c71c232983e21349836ce9f272e8a3c29/rpds_py-0.26.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:caf51943715b12af827696ec395bfa68f090a4c1a1d2509eb4e2cb69abbbdb33", size = 590724 },
+ { url = "https://files.pythonhosted.org/packages/2c/b4/95744085e65b7187d83f2fcb0bef70716a1ea0a9e5d8f7f39a86e5d83424/rpds_py-0.26.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:4a59e5bc386de021f56337f757301b337d7ab58baa40174fb150accd480bc953", size = 558285 },
+ { url = "https://files.pythonhosted.org/packages/37/37/6309a75e464d1da2559446f9c811aa4d16343cebe3dbb73701e63f760caa/rpds_py-0.26.0-cp314-cp314-win32.whl", hash = "sha256:92c8db839367ef16a662478f0a2fe13e15f2227da3c1430a782ad0f6ee009ec9", size = 223459 },
+ { url = "https://files.pythonhosted.org/packages/d9/6f/8e9c11214c46098b1d1391b7e02b70bb689ab963db3b19540cba17315291/rpds_py-0.26.0-cp314-cp314-win_amd64.whl", hash = "sha256:b0afb8cdd034150d4d9f53926226ed27ad15b7f465e93d7468caaf5eafae0d37", size = 236083 },
+ { url = "https://files.pythonhosted.org/packages/47/af/9c4638994dd623d51c39892edd9d08e8be8220a4b7e874fa02c2d6e91955/rpds_py-0.26.0-cp314-cp314-win_arm64.whl", hash = "sha256:ca3f059f4ba485d90c8dc75cb5ca897e15325e4e609812ce57f896607c1c0867", size = 223291 },
+ { url = "https://files.pythonhosted.org/packages/4d/db/669a241144460474aab03e254326b32c42def83eb23458a10d163cb9b5ce/rpds_py-0.26.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:5afea17ab3a126006dc2f293b14ffc7ef3c85336cf451564a0515ed7648033da", size = 361445 },
+ { url = "https://files.pythonhosted.org/packages/3b/2d/133f61cc5807c6c2fd086a46df0eb8f63a23f5df8306ff9f6d0fd168fecc/rpds_py-0.26.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:69f0c0a3df7fd3a7eec50a00396104bb9a843ea6d45fcc31c2d5243446ffd7a7", size = 347206 },
+ { url = "https://files.pythonhosted.org/packages/05/bf/0e8fb4c05f70273469eecf82f6ccf37248558526a45321644826555db31b/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:801a71f70f9813e82d2513c9a96532551fce1e278ec0c64610992c49c04c2dad", size = 380330 },
+ { url = "https://files.pythonhosted.org/packages/d4/a8/060d24185d8b24d3923322f8d0ede16df4ade226a74e747b8c7c978e3dd3/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:df52098cde6d5e02fa75c1f6244f07971773adb4a26625edd5c18fee906fa84d", size = 392254 },
+ { url = "https://files.pythonhosted.org/packages/b9/7b/7c2e8a9ee3e6bc0bae26bf29f5219955ca2fbb761dca996a83f5d2f773fe/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bc596b30f86dc6f0929499c9e574601679d0341a0108c25b9b358a042f51bca", size = 516094 },
+ { url = "https://files.pythonhosted.org/packages/75/d6/f61cafbed8ba1499b9af9f1777a2a199cd888f74a96133d8833ce5eaa9c5/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9dfbe56b299cf5875b68eb6f0ebaadc9cac520a1989cac0db0765abfb3709c19", size = 402889 },
+ { url = "https://files.pythonhosted.org/packages/92/19/c8ac0a8a8df2dd30cdec27f69298a5c13e9029500d6d76718130f5e5be10/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac64f4b2bdb4ea622175c9ab7cf09444e412e22c0e02e906978b3b488af5fde8", size = 384301 },
+ { url = "https://files.pythonhosted.org/packages/41/e1/6b1859898bc292a9ce5776016c7312b672da00e25cec74d7beced1027286/rpds_py-0.26.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:181ef9b6bbf9845a264f9aa45c31836e9f3c1f13be565d0d010e964c661d1e2b", size = 412891 },
+ { url = "https://files.pythonhosted.org/packages/ef/b9/ceb39af29913c07966a61367b3c08b4f71fad841e32c6b59a129d5974698/rpds_py-0.26.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:49028aa684c144ea502a8e847d23aed5e4c2ef7cadfa7d5eaafcb40864844b7a", size = 557044 },
+ { url = "https://files.pythonhosted.org/packages/2f/27/35637b98380731a521f8ec4f3fd94e477964f04f6b2f8f7af8a2d889a4af/rpds_py-0.26.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:e5d524d68a474a9688336045bbf76cb0def88549c1b2ad9dbfec1fb7cfbe9170", size = 585774 },
+ { url = "https://files.pythonhosted.org/packages/52/d9/3f0f105420fecd18551b678c9a6ce60bd23986098b252a56d35781b3e7e9/rpds_py-0.26.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c1851f429b822831bd2edcbe0cfd12ee9ea77868f8d3daf267b189371671c80e", size = 554886 },
+ { url = "https://files.pythonhosted.org/packages/6b/c5/347c056a90dc8dd9bc240a08c527315008e1b5042e7a4cf4ac027be9d38a/rpds_py-0.26.0-cp314-cp314t-win32.whl", hash = "sha256:7bdb17009696214c3b66bb3590c6d62e14ac5935e53e929bcdbc5a495987a84f", size = 219027 },
+ { url = "https://files.pythonhosted.org/packages/75/04/5302cea1aa26d886d34cadbf2dc77d90d7737e576c0065f357b96dc7a1a6/rpds_py-0.26.0-cp314-cp314t-win_amd64.whl", hash = "sha256:f14440b9573a6f76b4ee4770c13f0b5921f71dde3b6fcb8dabbefd13b7fe05d7", size = 232821 },
+]
+
+[[package]]
+name = "rsa"
+version = "4.9.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pyasn1" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/da/8a/22b7beea3ee0d44b1916c0c1cb0ee3af23b700b6da9f04991899d0c555d4/rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75", size = 29034 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/64/8d/0133e4eb4beed9e425d9a98ed6e081a55d195481b7632472be1af08d2f6b/rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762", size = 34696 },
+]
+
+[[package]]
+name = "ruff"
+version = "0.12.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/9b/ce/8d7dbedede481245b489b769d27e2934730791a9a82765cb94566c6e6abd/ruff-0.12.4.tar.gz", hash = "sha256:13efa16df6c6eeb7d0f091abae50f58e9522f3843edb40d56ad52a5a4a4b6873", size = 5131435 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ae/9f/517bc5f61bad205b7f36684ffa5415c013862dee02f55f38a217bdbe7aa4/ruff-0.12.4-py3-none-linux_armv6l.whl", hash = "sha256:cb0d261dac457ab939aeb247e804125a5d521b21adf27e721895b0d3f83a0d0a", size = 10188824 },
+ { url = "https://files.pythonhosted.org/packages/28/83/691baae5a11fbbde91df01c565c650fd17b0eabed259e8b7563de17c6529/ruff-0.12.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:55c0f4ca9769408d9b9bac530c30d3e66490bd2beb2d3dae3e4128a1f05c7442", size = 10884521 },
+ { url = "https://files.pythonhosted.org/packages/d6/8d/756d780ff4076e6dd035d058fa220345f8c458391f7edfb1c10731eedc75/ruff-0.12.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:a8224cc3722c9ad9044da7f89c4c1ec452aef2cfe3904365025dd2f51daeae0e", size = 10277653 },
+ { url = "https://files.pythonhosted.org/packages/8d/97/8eeee0f48ece153206dce730fc9e0e0ca54fd7f261bb3d99c0a4343a1892/ruff-0.12.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9949d01d64fa3672449a51ddb5d7548b33e130240ad418884ee6efa7a229586", size = 10485993 },
+ { url = "https://files.pythonhosted.org/packages/49/b8/22a43d23a1f68df9b88f952616c8508ea6ce4ed4f15353b8168c48b2d7e7/ruff-0.12.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:be0593c69df9ad1465e8a2d10e3defd111fdb62dcd5be23ae2c06da77e8fcffb", size = 10022824 },
+ { url = "https://files.pythonhosted.org/packages/cd/70/37c234c220366993e8cffcbd6cadbf332bfc848cbd6f45b02bade17e0149/ruff-0.12.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7dea966bcb55d4ecc4cc3270bccb6f87a337326c9dcd3c07d5b97000dbff41c", size = 11524414 },
+ { url = "https://files.pythonhosted.org/packages/14/77/c30f9964f481b5e0e29dd6a1fae1f769ac3fd468eb76fdd5661936edd262/ruff-0.12.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:afcfa3ab5ab5dd0e1c39bf286d829e042a15e966b3726eea79528e2e24d8371a", size = 12419216 },
+ { url = "https://files.pythonhosted.org/packages/6e/79/af7fe0a4202dce4ef62c5e33fecbed07f0178f5b4dd9c0d2fcff5ab4a47c/ruff-0.12.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c057ce464b1413c926cdb203a0f858cd52f3e73dcb3270a3318d1630f6395bb3", size = 11976756 },
+ { url = "https://files.pythonhosted.org/packages/09/d1/33fb1fc00e20a939c305dbe2f80df7c28ba9193f7a85470b982815a2dc6a/ruff-0.12.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e64b90d1122dc2713330350626b10d60818930819623abbb56535c6466cce045", size = 11020019 },
+ { url = "https://files.pythonhosted.org/packages/64/f4/e3cd7f7bda646526f09693e2e02bd83d85fff8a8222c52cf9681c0d30843/ruff-0.12.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2abc48f3d9667fdc74022380b5c745873499ff827393a636f7a59da1515e7c57", size = 11277890 },
+ { url = "https://files.pythonhosted.org/packages/5e/d0/69a85fb8b94501ff1a4f95b7591505e8983f38823da6941eb5b6badb1e3a/ruff-0.12.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:2b2449dc0c138d877d629bea151bee8c0ae3b8e9c43f5fcaafcd0c0d0726b184", size = 10348539 },
+ { url = "https://files.pythonhosted.org/packages/16/a0/91372d1cb1678f7d42d4893b88c252b01ff1dffcad09ae0c51aa2542275f/ruff-0.12.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:56e45bb11f625db55f9b70477062e6a1a04d53628eda7784dce6e0f55fd549eb", size = 10009579 },
+ { url = "https://files.pythonhosted.org/packages/23/1b/c4a833e3114d2cc0f677e58f1df6c3b20f62328dbfa710b87a1636a5e8eb/ruff-0.12.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:478fccdb82ca148a98a9ff43658944f7ab5ec41c3c49d77cd99d44da019371a1", size = 10942982 },
+ { url = "https://files.pythonhosted.org/packages/ff/ce/ce85e445cf0a5dd8842f2f0c6f0018eedb164a92bdf3eda51984ffd4d989/ruff-0.12.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:0fc426bec2e4e5f4c4f182b9d2ce6a75c85ba9bcdbe5c6f2a74fcb8df437df4b", size = 11343331 },
+ { url = "https://files.pythonhosted.org/packages/35/cf/441b7fc58368455233cfb5b77206c849b6dfb48b23de532adcc2e50ccc06/ruff-0.12.4-py3-none-win32.whl", hash = "sha256:4de27977827893cdfb1211d42d84bc180fceb7b72471104671c59be37041cf93", size = 10267904 },
+ { url = "https://files.pythonhosted.org/packages/ce/7e/20af4a0df5e1299e7368d5ea4350412226afb03d95507faae94c80f00afd/ruff-0.12.4-py3-none-win_amd64.whl", hash = "sha256:fe0b9e9eb23736b453143d72d2ceca5db323963330d5b7859d60d101147d461a", size = 11209038 },
+ { url = "https://files.pythonhosted.org/packages/11/02/8857d0dfb8f44ef299a5dfd898f673edefb71e3b533b3b9d2db4c832dd13/ruff-0.12.4-py3-none-win_arm64.whl", hash = "sha256:0618ec4442a83ab545e5b71202a5c0ed7791e8471435b94e655b570a5031a98e", size = 10469336 },
+]
+
+[[package]]
+name = "scale-gp"
+version = "0.1.0a59"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "anyio" },
+ { name = "distro" },
+ { name = "httpx" },
+ { name = "pydantic" },
+ { name = "sniffio" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/c3/3e/c944564757c0a737937c3c87ef61ccfd5138bbe5201fa93edc704124a297/scale_gp-0.1.0a59.tar.gz", hash = "sha256:b7c1e6edb431824f44b8ed2e49969345465e45a1978c1ab6462a7db6d8718f1a", size = 408472 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/13/7c/de71f2853d062535a762157eb9bc0c180fee160447039f074cbefe495728/scale_gp-0.1.0a59-py3-none-any.whl", hash = "sha256:841846c83e4760e14b76ac2a9b44d40a3e9800ad8505f9d24fef11d981466e35", size = 545196 },
+]
+
+[[package]]
+name = "scale-gp-beta"
+version = "0.1.0a20"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "anyio" },
+ { name = "distro" },
+ { name = "httpx" },
+ { name = "pydantic" },
+ { name = "sniffio" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/86/2b/a6a373ec8d33fad8b553182aa1ceade94434752c935a4ce16ca84d188c0f/scale_gp_beta-0.1.0a20.tar.gz", hash = "sha256:a2be5c1afcb9171d3c2d6f15b45f4512105c79ab12be8e7ef5a22167ac77fb88", size = 157799 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/af/12/55f47289a0ae1065e4115bc8018b2f01df0b6560b07bacfc0dcf6c3bdcbe/scale_gp_beta-0.1.0a20-py3-none-any.whl", hash = "sha256:482385ee6c3b912aecf70795948ac45b215a4d19feba60f67d6e10c4312440c6", size = 157906 },
+]
+
+[[package]]
+name = "shellingham"
+version = "1.5.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755 },
+]
+
+[[package]]
+name = "six"
+version = "1.17.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050 },
+]
+
+[[package]]
+name = "sniffio"
+version = "1.3.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 },
+]
+
+[[package]]
+name = "sse-starlette"
+version = "2.4.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "anyio" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/07/3e/eae74d8d33e3262bae0a7e023bb43d8bdd27980aa3557333f4632611151f/sse_starlette-2.4.1.tar.gz", hash = "sha256:7c8a800a1ca343e9165fc06bbda45c78e4c6166320707ae30b416c42da070926", size = 18635 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e4/f1/6c7eaa8187ba789a6dd6d74430307478d2a91c23a5452ab339b6fbe15a08/sse_starlette-2.4.1-py3-none-any.whl", hash = "sha256:08b77ea898ab1a13a428b2b6f73cfe6d0e607a7b4e15b9bb23e4a37b087fd39a", size = 10824 },
+]
+
+[[package]]
+name = "stack-data"
+version = "0.6.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "asttokens" },
+ { name = "executing" },
+ { name = "pure-eval" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/28/e3/55dcc2cfbc3ca9c29519eb6884dd1415ecb53b0e934862d3559ddcb7e20b/stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9", size = 44707 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f1/7b/ce1eafaf1a76852e2ec9b22edecf1daa58175c090266e9f6c64afcd81d91/stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695", size = 24521 },
+]
+
+[[package]]
+name = "starlette"
+version = "0.46.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "anyio" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ce/20/08dfcd9c983f6a6f4a1000d934b9e6d626cff8d2eeb77a89a68eef20a2b7/starlette-0.46.2.tar.gz", hash = "sha256:7f7361f34eed179294600af672f565727419830b54b7b084efe44bb82d2fccd5", size = 2580846 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/8b/0c/9d30a4ebeb6db2b25a841afbb80f6ef9a854fc3b41be131d249a977b4959/starlette-0.46.2-py3-none-any.whl", hash = "sha256:595633ce89f8ffa71a015caed34a5b2dc1c0cdb3f0f1fbd1e69339cf2abeec35", size = 72037 },
+]
+
+[[package]]
+name = "temporalio"
+version = "1.14.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "nexus-rpc" },
+ { name = "protobuf" },
+ { name = "types-protobuf" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/40/23/ef5ed581d26112e21c4a6d4ddc2c4eaa5700c0d70b53b07566553e9b7d90/temporalio-1.14.1.tar.gz", hash = "sha256:b240cf56f64add65beb75bd18aa854ac35bdc2505097af5af1e235d611190a9d", size = 1607639 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/bd/66/6dc4f5a647a9901cf19e012c442173574babdc879ccaf4cb166662a23ef0/temporalio-1.14.1-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:ebde00b59af72e512e5837445e4b5b8aa445431d57a71bbeb57a5ba8a93ac8be", size = 12508009 },
+ { url = "https://files.pythonhosted.org/packages/bb/dc/654ebcc92c658180576127ac6dc047fab43b7730f39df4439645e91577fb/temporalio-1.14.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:3c21cff8fdc60fbcc9acd91e6c119b0b5f9de7671fe806459f00d68bd4ecae78", size = 12091653 },
+ { url = "https://files.pythonhosted.org/packages/8a/58/7fc3a7bde275c059e42d0279c54e8e66642b67be8eda21b31347f4277186/temporalio-1.14.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f984b503ae741213fe71128d6193076f3267691561ff3c55dbe798f92e6ee1b", size = 12451995 },
+ { url = "https://files.pythonhosted.org/packages/98/12/14f6a7a1f4aebb7d846469f5c1cd165cce55b793ded6ce5fc315bd83e28f/temporalio-1.14.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:830cb1a820624a5e64f6c874b5aca6ad9eb841295407dd2011074159a2d28bdb", size = 12688904 },
+ { url = "https://files.pythonhosted.org/packages/b4/ed/c09f1ca41d5ed9f9a777a0ddd5bc225f8300bab8b42bc6751195566706fb/temporalio-1.14.1-cp39-abi3-win_amd64.whl", hash = "sha256:ad4e6a16b42bb34aebec62fb8bbe8f64643d8268ed6d7db337dfe98a76799bb0", size = 12758696 },
+]
+
+[[package]]
+name = "tiktoken"
+version = "0.9.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "regex" },
+ { name = "requests" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ea/cf/756fedf6981e82897f2d570dd25fa597eb3f4459068ae0572d7e888cfd6f/tiktoken-0.9.0.tar.gz", hash = "sha256:d02a5ca6a938e0490e1ff957bc48c8b078c88cb83977be1625b1fd8aac792c5d", size = 35991 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/cf/e5/21ff33ecfa2101c1bb0f9b6df750553bd873b7fb532ce2cb276ff40b197f/tiktoken-0.9.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e88f121c1c22b726649ce67c089b90ddda8b9662545a8aeb03cfef15967ddd03", size = 1065073 },
+ { url = "https://files.pythonhosted.org/packages/8e/03/a95e7b4863ee9ceec1c55983e4cc9558bcfd8f4f80e19c4f8a99642f697d/tiktoken-0.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a6600660f2f72369acb13a57fb3e212434ed38b045fd8cc6cdd74947b4b5d210", size = 1008075 },
+ { url = "https://files.pythonhosted.org/packages/40/10/1305bb02a561595088235a513ec73e50b32e74364fef4de519da69bc8010/tiktoken-0.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95e811743b5dfa74f4b227927ed86cbc57cad4df859cb3b643be797914e41794", size = 1140754 },
+ { url = "https://files.pythonhosted.org/packages/1b/40/da42522018ca496432ffd02793c3a72a739ac04c3794a4914570c9bb2925/tiktoken-0.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99376e1370d59bcf6935c933cb9ba64adc29033b7e73f5f7569f3aad86552b22", size = 1196678 },
+ { url = "https://files.pythonhosted.org/packages/5c/41/1e59dddaae270ba20187ceb8aa52c75b24ffc09f547233991d5fd822838b/tiktoken-0.9.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:badb947c32739fb6ddde173e14885fb3de4d32ab9d8c591cbd013c22b4c31dd2", size = 1259283 },
+ { url = "https://files.pythonhosted.org/packages/5b/64/b16003419a1d7728d0d8c0d56a4c24325e7b10a21a9dd1fc0f7115c02f0a/tiktoken-0.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:5a62d7a25225bafed786a524c1b9f0910a1128f4232615bf3f8257a73aaa3b16", size = 894897 },
+ { url = "https://files.pythonhosted.org/packages/7a/11/09d936d37f49f4f494ffe660af44acd2d99eb2429d60a57c71318af214e0/tiktoken-0.9.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2b0e8e05a26eda1249e824156d537015480af7ae222ccb798e5234ae0285dbdb", size = 1064919 },
+ { url = "https://files.pythonhosted.org/packages/80/0e/f38ba35713edb8d4197ae602e80837d574244ced7fb1b6070b31c29816e0/tiktoken-0.9.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:27d457f096f87685195eea0165a1807fae87b97b2161fe8c9b1df5bd74ca6f63", size = 1007877 },
+ { url = "https://files.pythonhosted.org/packages/fe/82/9197f77421e2a01373e27a79dd36efdd99e6b4115746ecc553318ecafbf0/tiktoken-0.9.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cf8ded49cddf825390e36dd1ad35cd49589e8161fdcb52aa25f0583e90a3e01", size = 1140095 },
+ { url = "https://files.pythonhosted.org/packages/f2/bb/4513da71cac187383541facd0291c4572b03ec23c561de5811781bbd988f/tiktoken-0.9.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc156cb314119a8bb9748257a2eaebd5cc0753b6cb491d26694ed42fc7cb3139", size = 1195649 },
+ { url = "https://files.pythonhosted.org/packages/fa/5c/74e4c137530dd8504e97e3a41729b1103a4ac29036cbfd3250b11fd29451/tiktoken-0.9.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:cd69372e8c9dd761f0ab873112aba55a0e3e506332dd9f7522ca466e817b1b7a", size = 1258465 },
+ { url = "https://files.pythonhosted.org/packages/de/a8/8f499c179ec900783ffe133e9aab10044481679bb9aad78436d239eee716/tiktoken-0.9.0-cp313-cp313-win_amd64.whl", hash = "sha256:5ea0edb6f83dc56d794723286215918c1cde03712cbbafa0348b33448faf5b95", size = 894669 },
+]
+
+[[package]]
+name = "tokenizers"
+version = "0.21.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "huggingface-hub" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ab/2d/b0fce2b8201635f60e8c95990080f58461cc9ca3d5026de2e900f38a7f21/tokenizers-0.21.2.tar.gz", hash = "sha256:fdc7cffde3e2113ba0e6cc7318c40e3438a4d74bbc62bf04bcc63bdfb082ac77", size = 351545 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/1d/cc/2936e2d45ceb130a21d929743f1e9897514691bec123203e10837972296f/tokenizers-0.21.2-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:342b5dfb75009f2255ab8dec0041287260fed5ce00c323eb6bab639066fef8ec", size = 2875206 },
+ { url = "https://files.pythonhosted.org/packages/6c/e6/33f41f2cc7861faeba8988e7a77601407bf1d9d28fc79c5903f8f77df587/tokenizers-0.21.2-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:126df3205d6f3a93fea80c7a8a266a78c1bd8dd2fe043386bafdd7736a23e45f", size = 2732655 },
+ { url = "https://files.pythonhosted.org/packages/33/2b/1791eb329c07122a75b01035b1a3aa22ad139f3ce0ece1b059b506d9d9de/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a32cd81be21168bd0d6a0f0962d60177c447a1aa1b1e48fa6ec9fc728ee0b12", size = 3019202 },
+ { url = "https://files.pythonhosted.org/packages/05/15/fd2d8104faa9f86ac68748e6f7ece0b5eb7983c7efc3a2c197cb98c99030/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8bd8999538c405133c2ab999b83b17c08b7fc1b48c1ada2469964605a709ef91", size = 2934539 },
+ { url = "https://files.pythonhosted.org/packages/a5/2e/53e8fd053e1f3ffbe579ca5f9546f35ac67cf0039ed357ad7ec57f5f5af0/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5e9944e61239b083a41cf8fc42802f855e1dca0f499196df37a8ce219abac6eb", size = 3248665 },
+ { url = "https://files.pythonhosted.org/packages/00/15/79713359f4037aa8f4d1f06ffca35312ac83629da062670e8830917e2153/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:514cd43045c5d546f01142ff9c79a96ea69e4b5cda09e3027708cb2e6d5762ab", size = 3451305 },
+ { url = "https://files.pythonhosted.org/packages/38/5f/959f3a8756fc9396aeb704292777b84f02a5c6f25c3fc3ba7530db5feb2c/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b1b9405822527ec1e0f7d8d2fdb287a5730c3a6518189c968254a8441b21faae", size = 3214757 },
+ { url = "https://files.pythonhosted.org/packages/c5/74/f41a432a0733f61f3d21b288de6dfa78f7acff309c6f0f323b2833e9189f/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed9a4d51c395103ad24f8e7eb976811c57fbec2af9f133df471afcd922e5020", size = 3121887 },
+ { url = "https://files.pythonhosted.org/packages/3c/6a/bc220a11a17e5d07b0dfb3b5c628621d4dcc084bccd27cfaead659963016/tokenizers-0.21.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2c41862df3d873665ec78b6be36fcc30a26e3d4902e9dd8608ed61d49a48bc19", size = 9091965 },
+ { url = "https://files.pythonhosted.org/packages/6c/bd/ac386d79c4ef20dc6f39c4706640c24823dca7ebb6f703bfe6b5f0292d88/tokenizers-0.21.2-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:ed21dc7e624e4220e21758b2e62893be7101453525e3d23264081c9ef9a6d00d", size = 9053372 },
+ { url = "https://files.pythonhosted.org/packages/63/7b/5440bf203b2a5358f074408f7f9c42884849cd9972879e10ee6b7a8c3b3d/tokenizers-0.21.2-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:0e73770507e65a0e0e2a1affd6b03c36e3bc4377bd10c9ccf51a82c77c0fe365", size = 9298632 },
+ { url = "https://files.pythonhosted.org/packages/a4/d2/faa1acac3f96a7427866e94ed4289949b2524f0c1878512516567d80563c/tokenizers-0.21.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:106746e8aa9014a12109e58d540ad5465b4c183768ea96c03cbc24c44d329958", size = 9470074 },
+ { url = "https://files.pythonhosted.org/packages/d8/a5/896e1ef0707212745ae9f37e84c7d50269411aef2e9ccd0de63623feecdf/tokenizers-0.21.2-cp39-abi3-win32.whl", hash = "sha256:cabda5a6d15d620b6dfe711e1af52205266d05b379ea85a8a301b3593c60e962", size = 2330115 },
+ { url = "https://files.pythonhosted.org/packages/13/c3/cc2755ee10be859c4338c962a35b9a663788c0c0b50c0bdd8078fb6870cf/tokenizers-0.21.2-cp39-abi3-win_amd64.whl", hash = "sha256:58747bb898acdb1007f37a7bbe614346e98dc28708ffb66a3fd50ce169ac6c98", size = 2509918 },
+]
+
+[[package]]
+name = "tornado"
+version = "6.5.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/51/89/c72771c81d25d53fe33e3dca61c233b665b2780f21820ba6fd2c6793c12b/tornado-6.5.1.tar.gz", hash = "sha256:84ceece391e8eb9b2b95578db65e920d2a61070260594819589609ba9bc6308c", size = 509934 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/77/89/f4532dee6843c9e0ebc4e28d4be04c67f54f60813e4bf73d595fe7567452/tornado-6.5.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d50065ba7fd11d3bd41bcad0825227cc9a95154bad83239357094c36708001f7", size = 441948 },
+ { url = "https://files.pythonhosted.org/packages/15/9a/557406b62cffa395d18772e0cdcf03bed2fff03b374677348eef9f6a3792/tornado-6.5.1-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:9e9ca370f717997cb85606d074b0e5b247282cf5e2e1611568b8821afe0342d6", size = 440112 },
+ { url = "https://files.pythonhosted.org/packages/55/82/7721b7319013a3cf881f4dffa4f60ceff07b31b394e459984e7a36dc99ec/tornado-6.5.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b77e9dfa7ed69754a54c89d82ef746398be82f749df69c4d3abe75c4d1ff4888", size = 443672 },
+ { url = "https://files.pythonhosted.org/packages/7d/42/d11c4376e7d101171b94e03cef0cbce43e823ed6567ceda571f54cf6e3ce/tornado-6.5.1-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:253b76040ee3bab8bcf7ba9feb136436a3787208717a1fb9f2c16b744fba7331", size = 443019 },
+ { url = "https://files.pythonhosted.org/packages/7d/f7/0c48ba992d875521ac761e6e04b0a1750f8150ae42ea26df1852d6a98942/tornado-6.5.1-cp39-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:308473f4cc5a76227157cdf904de33ac268af770b2c5f05ca6c1161d82fdd95e", size = 443252 },
+ { url = "https://files.pythonhosted.org/packages/89/46/d8d7413d11987e316df4ad42e16023cd62666a3c0dfa1518ffa30b8df06c/tornado-6.5.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:caec6314ce8a81cf69bd89909f4b633b9f523834dc1a352021775d45e51d9401", size = 443930 },
+ { url = "https://files.pythonhosted.org/packages/78/b2/f8049221c96a06df89bed68260e8ca94beca5ea532ffc63b1175ad31f9cc/tornado-6.5.1-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:13ce6e3396c24e2808774741331638ee6c2f50b114b97a55c5b442df65fd9692", size = 443351 },
+ { url = "https://files.pythonhosted.org/packages/76/ff/6a0079e65b326cc222a54720a748e04a4db246870c4da54ece4577bfa702/tornado-6.5.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5cae6145f4cdf5ab24744526cc0f55a17d76f02c98f4cff9daa08ae9a217448a", size = 443328 },
+ { url = "https://files.pythonhosted.org/packages/49/18/e3f902a1d21f14035b5bc6246a8c0f51e0eef562ace3a2cea403c1fb7021/tornado-6.5.1-cp39-abi3-win32.whl", hash = "sha256:e0a36e1bc684dca10b1aa75a31df8bdfed656831489bc1e6a6ebed05dc1ec365", size = 444396 },
+ { url = "https://files.pythonhosted.org/packages/7b/09/6526e32bf1049ee7de3bebba81572673b19a2a8541f795d887e92af1a8bc/tornado-6.5.1-cp39-abi3-win_amd64.whl", hash = "sha256:908e7d64567cecd4c2b458075589a775063453aeb1d2a1853eedb806922f568b", size = 444840 },
+ { url = "https://files.pythonhosted.org/packages/55/a7/535c44c7bea4578e48281d83c615219f3ab19e6abc67625ef637c73987be/tornado-6.5.1-cp39-abi3-win_arm64.whl", hash = "sha256:02420a0eb7bf617257b9935e2b754d1b63897525d8a289c9d65690d580b4dcf7", size = 443596 },
+]
+
+[[package]]
+name = "tqdm"
+version = "4.67.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "colorama", marker = "sys_platform == 'win32'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540 },
+]
+
+[[package]]
+name = "traitlets"
+version = "5.14.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/eb/79/72064e6a701c2183016abbbfedaba506d81e30e232a68c9f0d6f6fcd1574/traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7", size = 161621 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/00/c0/8f5d070730d7836adc9c9b6408dec68c6ced86b304a9b26a14df072a6e8c/traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f", size = 85359 },
+]
+
+[[package]]
+name = "typer"
+version = "0.16.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "click" },
+ { name = "rich" },
+ { name = "shellingham" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/c5/8c/7d682431efca5fd290017663ea4588bf6f2c6aad085c7f108c5dbc316e70/typer-0.16.0.tar.gz", hash = "sha256:af377ffaee1dbe37ae9440cb4e8f11686ea5ce4e9bae01b84ae7c63b87f1dd3b", size = 102625 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/76/42/3efaf858001d2c2913de7f354563e3a3a2f0decae3efe98427125a8f441e/typer-0.16.0-py3-none-any.whl", hash = "sha256:1f79bed11d4d02d4310e3c1b7ba594183bcedb0ac73b27a9e5f28f6fb5b98855", size = 46317 },
+]
+
+[[package]]
+name = "types-protobuf"
+version = "6.30.2.20250703"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/dc/54/d63ce1eee8e93c4d710bbe2c663ec68e3672cf4f2fca26eecd20981c0c5d/types_protobuf-6.30.2.20250703.tar.gz", hash = "sha256:609a974754bbb71fa178fc641f51050395e8e1849f49d0420a6281ed8d1ddf46", size = 62300 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7e/2b/5d0377c3d6e0f49d4847ad2c40629593fee4a5c9ec56eba26a15c708fbc0/types_protobuf-6.30.2.20250703-py3-none-any.whl", hash = "sha256:fa5aff9036e9ef432d703abbdd801b436a249b6802e4df5ef74513e272434e57", size = 76489 },
+]
+
+[[package]]
+name = "types-requests"
+version = "2.31.0.6"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "types-urllib3" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/f9/b8/c1e8d39996b4929b918aba10dba5de07a8b3f4c8487bb61bb79882544e69/types-requests-2.31.0.6.tar.gz", hash = "sha256:cd74ce3b53c461f1228a9b783929ac73a666658f223e28ed29753771477b3bd0", size = 15535 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5c/a1/6f8dc74d9069e790d604ddae70cb46dcbac668f1bb08136e7b0f2f5cd3bf/types_requests-2.31.0.6-py3-none-any.whl", hash = "sha256:a2db9cb228a81da8348b49ad6db3f5519452dd20a9c1e1a868c83c5fe88fd1a9", size = 14516 },
+]
+
+[[package]]
+name = "types-urllib3"
+version = "1.26.25.14"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/73/de/b9d7a68ad39092368fb21dd6194b362b98a1daeea5dcfef5e1adb5031c7e/types-urllib3-1.26.25.14.tar.gz", hash = "sha256:229b7f577c951b8c1b92c1bc2b2fdb0b49847bd2af6d1cc2a2e3dd340f3bda8f", size = 11239 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/11/7b/3fc711b2efea5e85a7a0bbfe269ea944aa767bbba5ec52f9ee45d362ccf3/types_urllib3-1.26.25.14-py3-none-any.whl", hash = "sha256:9683bbb7fb72e32bfe9d2be6e04875fbe1b3eeec3cbb4ea231435aa7fd6b4f0e", size = 15377 },
+]
+
+[[package]]
+name = "typing-extensions"
+version = "4.14.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/98/5a/da40306b885cc8c09109dc2e1abd358d5684b1425678151cdaed4731c822/typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36", size = 107673 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76", size = 43906 },
+]
+
+[[package]]
+name = "typing-inspection"
+version = "0.4.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552 },
+]
+
+[[package]]
+name = "tzdata"
+version = "2025.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839 },
+]
+
+[[package]]
+name = "tzlocal"
+version = "5.3.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "tzdata", marker = "sys_platform == 'win32'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/8b/2e/c14812d3d4d9cd1773c6be938f89e5735a1f11a9f184ac3639b93cef35d5/tzlocal-5.3.1.tar.gz", hash = "sha256:cceffc7edecefea1f595541dbd6e990cb1ea3d19bf01b2809f362a03dd7921fd", size = 30761 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c2/14/e2a54fabd4f08cd7af1c07030603c3356b74da07f7cc056e600436edfa17/tzlocal-5.3.1-py3-none-any.whl", hash = "sha256:eb1a66c3ef5847adf7a834f1be0800581b683b5608e74f86ecbcef8ab91bb85d", size = 18026 },
+]
+
+[[package]]
+name = "urllib3"
+version = "1.26.20"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/e4/e8/6ff5e6bc22095cfc59b6ea711b687e2b7ed4bdb373f7eeec370a97d7392f/urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32", size = 307380 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/33/cf/8435d5a7159e2a9c83a95896ed596f68cf798005fe107cc655b5c5c14704/urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e", size = 144225 },
+]
+
+[[package]]
+name = "uvicorn"
+version = "0.35.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "click" },
+ { name = "h11" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/5e/42/e0e305207bb88c6b8d3061399c6a961ffe5fbb7e2aa63c9234df7259e9cd/uvicorn-0.35.0.tar.gz", hash = "sha256:bc662f087f7cf2ce11a1d7fd70b90c9f98ef2e2831556dd078d131b96cc94a01", size = 78473 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d2/e2/dc81b1bd1dcfe91735810265e9d26bc8ec5da45b4c0f6237e286819194c3/uvicorn-0.35.0-py3-none-any.whl", hash = "sha256:197535216b25ff9b785e29a0b79199f55222193d47f820816e7da751e9bc8d4a", size = 66406 },
+]
+
+[[package]]
+name = "wcwidth"
+version = "0.2.13"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5", size = 101301 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166 },
+]
+
+[[package]]
+name = "websocket-client"
+version = "1.8.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/e6/30/fba0d96b4b5fbf5948ed3f4681f7da2f9f64512e1d303f94b4cc174c24a5/websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da", size = 54648 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5a/84/44687a29792a70e111c5c477230a72c4b957d88d16141199bf9acb7537a3/websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526", size = 58826 },
+]
+
+[[package]]
+name = "yarl"
+version = "1.20.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "idna" },
+ { name = "multidict" },
+ { name = "propcache" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/3c/fb/efaa23fa4e45537b827620f04cf8f3cd658b76642205162e072703a5b963/yarl-1.20.1.tar.gz", hash = "sha256:d017a4997ee50c91fd5466cef416231bb82177b93b029906cefc542ce14c35ac", size = 186428 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5f/9a/cb7fad7d73c69f296eda6815e4a2c7ed53fc70c2f136479a91c8e5fbdb6d/yarl-1.20.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdcc4cd244e58593a4379fe60fdee5ac0331f8eb70320a24d591a3be197b94a9", size = 133667 },
+ { url = "https://files.pythonhosted.org/packages/67/38/688577a1cb1e656e3971fb66a3492501c5a5df56d99722e57c98249e5b8a/yarl-1.20.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b29a2c385a5f5b9c7d9347e5812b6f7ab267193c62d282a540b4fc528c8a9d2a", size = 91025 },
+ { url = "https://files.pythonhosted.org/packages/50/ec/72991ae51febeb11a42813fc259f0d4c8e0507f2b74b5514618d8b640365/yarl-1.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1112ae8154186dfe2de4732197f59c05a83dc814849a5ced892b708033f40dc2", size = 89709 },
+ { url = "https://files.pythonhosted.org/packages/99/da/4d798025490e89426e9f976702e5f9482005c548c579bdae792a4c37769e/yarl-1.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:90bbd29c4fe234233f7fa2b9b121fb63c321830e5d05b45153a2ca68f7d310ee", size = 352287 },
+ { url = "https://files.pythonhosted.org/packages/1a/26/54a15c6a567aac1c61b18aa0f4b8aa2e285a52d547d1be8bf48abe2b3991/yarl-1.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:680e19c7ce3710ac4cd964e90dad99bf9b5029372ba0c7cbfcd55e54d90ea819", size = 345429 },
+ { url = "https://files.pythonhosted.org/packages/d6/95/9dcf2386cb875b234353b93ec43e40219e14900e046bf6ac118f94b1e353/yarl-1.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a979218c1fdb4246a05efc2cc23859d47c89af463a90b99b7c56094daf25a16", size = 365429 },
+ { url = "https://files.pythonhosted.org/packages/91/b2/33a8750f6a4bc224242a635f5f2cff6d6ad5ba651f6edcccf721992c21a0/yarl-1.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255b468adf57b4a7b65d8aad5b5138dce6a0752c139965711bdcb81bc370e1b6", size = 363862 },
+ { url = "https://files.pythonhosted.org/packages/98/28/3ab7acc5b51f4434b181b0cee8f1f4b77a65919700a355fb3617f9488874/yarl-1.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a97d67108e79cfe22e2b430d80d7571ae57d19f17cda8bb967057ca8a7bf5bfd", size = 355616 },
+ { url = "https://files.pythonhosted.org/packages/36/a3/f666894aa947a371724ec7cd2e5daa78ee8a777b21509b4252dd7bd15e29/yarl-1.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8570d998db4ddbfb9a590b185a0a33dbf8aafb831d07a5257b4ec9948df9cb0a", size = 339954 },
+ { url = "https://files.pythonhosted.org/packages/f1/81/5f466427e09773c04219d3450d7a1256138a010b6c9f0af2d48565e9ad13/yarl-1.20.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:97c75596019baae7c71ccf1d8cc4738bc08134060d0adfcbe5642f778d1dca38", size = 365575 },
+ { url = "https://files.pythonhosted.org/packages/2e/e3/e4b0ad8403e97e6c9972dd587388940a032f030ebec196ab81a3b8e94d31/yarl-1.20.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1c48912653e63aef91ff988c5432832692ac5a1d8f0fb8a33091520b5bbe19ef", size = 365061 },
+ { url = "https://files.pythonhosted.org/packages/ac/99/b8a142e79eb86c926f9f06452eb13ecb1bb5713bd01dc0038faf5452e544/yarl-1.20.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4c3ae28f3ae1563c50f3d37f064ddb1511ecc1d5584e88c6b7c63cf7702a6d5f", size = 364142 },
+ { url = "https://files.pythonhosted.org/packages/34/f2/08ed34a4a506d82a1a3e5bab99ccd930a040f9b6449e9fd050320e45845c/yarl-1.20.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c5e9642f27036283550f5f57dc6156c51084b458570b9d0d96100c8bebb186a8", size = 381894 },
+ { url = "https://files.pythonhosted.org/packages/92/f8/9a3fbf0968eac704f681726eff595dce9b49c8a25cd92bf83df209668285/yarl-1.20.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2c26b0c49220d5799f7b22c6838409ee9bc58ee5c95361a4d7831f03cc225b5a", size = 383378 },
+ { url = "https://files.pythonhosted.org/packages/af/85/9363f77bdfa1e4d690957cd39d192c4cacd1c58965df0470a4905253b54f/yarl-1.20.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:564ab3d517e3d01c408c67f2e5247aad4019dcf1969982aba3974b4093279004", size = 374069 },
+ { url = "https://files.pythonhosted.org/packages/35/99/9918c8739ba271dcd935400cff8b32e3cd319eaf02fcd023d5dcd487a7c8/yarl-1.20.1-cp312-cp312-win32.whl", hash = "sha256:daea0d313868da1cf2fac6b2d3a25c6e3a9e879483244be38c8e6a41f1d876a5", size = 81249 },
+ { url = "https://files.pythonhosted.org/packages/eb/83/5d9092950565481b413b31a23e75dd3418ff0a277d6e0abf3729d4d1ce25/yarl-1.20.1-cp312-cp312-win_amd64.whl", hash = "sha256:48ea7d7f9be0487339828a4de0360d7ce0efc06524a48e1810f945c45b813698", size = 86710 },
+ { url = "https://files.pythonhosted.org/packages/8a/e1/2411b6d7f769a07687acee88a062af5833cf1966b7266f3d8dfb3d3dc7d3/yarl-1.20.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:0b5ff0fbb7c9f1b1b5ab53330acbfc5247893069e7716840c8e7d5bb7355038a", size = 131811 },
+ { url = "https://files.pythonhosted.org/packages/b2/27/584394e1cb76fb771371770eccad35de400e7b434ce3142c2dd27392c968/yarl-1.20.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:14f326acd845c2b2e2eb38fb1346c94f7f3b01a4f5c788f8144f9b630bfff9a3", size = 90078 },
+ { url = "https://files.pythonhosted.org/packages/bf/9a/3246ae92d4049099f52d9b0fe3486e3b500e29b7ea872d0f152966fc209d/yarl-1.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f60e4ad5db23f0b96e49c018596707c3ae89f5d0bd97f0ad3684bcbad899f1e7", size = 88748 },
+ { url = "https://files.pythonhosted.org/packages/a3/25/35afe384e31115a1a801fbcf84012d7a066d89035befae7c5d4284df1e03/yarl-1.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49bdd1b8e00ce57e68ba51916e4bb04461746e794e7c4d4bbc42ba2f18297691", size = 349595 },
+ { url = "https://files.pythonhosted.org/packages/28/2d/8aca6cb2cabc8f12efcb82749b9cefecbccfc7b0384e56cd71058ccee433/yarl-1.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:66252d780b45189975abfed839616e8fd2dbacbdc262105ad7742c6ae58f3e31", size = 342616 },
+ { url = "https://files.pythonhosted.org/packages/0b/e9/1312633d16b31acf0098d30440ca855e3492d66623dafb8e25b03d00c3da/yarl-1.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59174e7332f5d153d8f7452a102b103e2e74035ad085f404df2e40e663a22b28", size = 361324 },
+ { url = "https://files.pythonhosted.org/packages/bc/a0/688cc99463f12f7669eec7c8acc71ef56a1521b99eab7cd3abb75af887b0/yarl-1.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e3968ec7d92a0c0f9ac34d5ecfd03869ec0cab0697c91a45db3fbbd95fe1b653", size = 359676 },
+ { url = "https://files.pythonhosted.org/packages/af/44/46407d7f7a56e9a85a4c207724c9f2c545c060380718eea9088f222ba697/yarl-1.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1a4fbb50e14396ba3d375f68bfe02215d8e7bc3ec49da8341fe3157f59d2ff5", size = 352614 },
+ { url = "https://files.pythonhosted.org/packages/b1/91/31163295e82b8d5485d31d9cf7754d973d41915cadce070491778d9c9825/yarl-1.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11a62c839c3a8eac2410e951301309426f368388ff2f33799052787035793b02", size = 336766 },
+ { url = "https://files.pythonhosted.org/packages/b4/8e/c41a5bc482121f51c083c4c2bcd16b9e01e1cf8729e380273a952513a21f/yarl-1.20.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:041eaa14f73ff5a8986b4388ac6bb43a77f2ea09bf1913df7a35d4646db69e53", size = 364615 },
+ { url = "https://files.pythonhosted.org/packages/e3/5b/61a3b054238d33d70ea06ebba7e58597891b71c699e247df35cc984ab393/yarl-1.20.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:377fae2fef158e8fd9d60b4c8751387b8d1fb121d3d0b8e9b0be07d1b41e83dc", size = 360982 },
+ { url = "https://files.pythonhosted.org/packages/df/a3/6a72fb83f8d478cb201d14927bc8040af901811a88e0ff2da7842dd0ed19/yarl-1.20.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1c92f4390e407513f619d49319023664643d3339bd5e5a56a3bebe01bc67ec04", size = 369792 },
+ { url = "https://files.pythonhosted.org/packages/7c/af/4cc3c36dfc7c077f8dedb561eb21f69e1e9f2456b91b593882b0b18c19dc/yarl-1.20.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d25ddcf954df1754ab0f86bb696af765c5bfaba39b74095f27eececa049ef9a4", size = 382049 },
+ { url = "https://files.pythonhosted.org/packages/19/3a/e54e2c4752160115183a66dc9ee75a153f81f3ab2ba4bf79c3c53b33de34/yarl-1.20.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:909313577e9619dcff8c31a0ea2aa0a2a828341d92673015456b3ae492e7317b", size = 384774 },
+ { url = "https://files.pythonhosted.org/packages/9c/20/200ae86dabfca89060ec6447649f219b4cbd94531e425e50d57e5f5ac330/yarl-1.20.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:793fd0580cb9664548c6b83c63b43c477212c0260891ddf86809e1c06c8b08f1", size = 374252 },
+ { url = "https://files.pythonhosted.org/packages/83/75/11ee332f2f516b3d094e89448da73d557687f7d137d5a0f48c40ff211487/yarl-1.20.1-cp313-cp313-win32.whl", hash = "sha256:468f6e40285de5a5b3c44981ca3a319a4b208ccc07d526b20b12aeedcfa654b7", size = 81198 },
+ { url = "https://files.pythonhosted.org/packages/ba/ba/39b1ecbf51620b40ab402b0fc817f0ff750f6d92712b44689c2c215be89d/yarl-1.20.1-cp313-cp313-win_amd64.whl", hash = "sha256:495b4ef2fea40596bfc0affe3837411d6aa3371abcf31aac0ccc4bdd64d4ef5c", size = 86346 },
+ { url = "https://files.pythonhosted.org/packages/43/c7/669c52519dca4c95153c8ad96dd123c79f354a376346b198f438e56ffeb4/yarl-1.20.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:f60233b98423aab21d249a30eb27c389c14929f47be8430efa7dbd91493a729d", size = 138826 },
+ { url = "https://files.pythonhosted.org/packages/6a/42/fc0053719b44f6ad04a75d7f05e0e9674d45ef62f2d9ad2c1163e5c05827/yarl-1.20.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:6f3eff4cc3f03d650d8755c6eefc844edde99d641d0dcf4da3ab27141a5f8ddf", size = 93217 },
+ { url = "https://files.pythonhosted.org/packages/4f/7f/fa59c4c27e2a076bba0d959386e26eba77eb52ea4a0aac48e3515c186b4c/yarl-1.20.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:69ff8439d8ba832d6bed88af2c2b3445977eba9a4588b787b32945871c2444e3", size = 92700 },
+ { url = "https://files.pythonhosted.org/packages/2f/d4/062b2f48e7c93481e88eff97a6312dca15ea200e959f23e96d8ab898c5b8/yarl-1.20.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cf34efa60eb81dd2645a2e13e00bb98b76c35ab5061a3989c7a70f78c85006d", size = 347644 },
+ { url = "https://files.pythonhosted.org/packages/89/47/78b7f40d13c8f62b499cc702fdf69e090455518ae544c00a3bf4afc9fc77/yarl-1.20.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8e0fe9364ad0fddab2688ce72cb7a8e61ea42eff3c7caeeb83874a5d479c896c", size = 323452 },
+ { url = "https://files.pythonhosted.org/packages/eb/2b/490d3b2dc66f52987d4ee0d3090a147ea67732ce6b4d61e362c1846d0d32/yarl-1.20.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f64fbf81878ba914562c672024089e3401974a39767747691c65080a67b18c1", size = 346378 },
+ { url = "https://files.pythonhosted.org/packages/66/ad/775da9c8a94ce925d1537f939a4f17d782efef1f973039d821cbe4bcc211/yarl-1.20.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6342d643bf9a1de97e512e45e4b9560a043347e779a173250824f8b254bd5ce", size = 353261 },
+ { url = "https://files.pythonhosted.org/packages/4b/23/0ed0922b47a4f5c6eb9065d5ff1e459747226ddce5c6a4c111e728c9f701/yarl-1.20.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56dac5f452ed25eef0f6e3c6a066c6ab68971d96a9fb441791cad0efba6140d3", size = 335987 },
+ { url = "https://files.pythonhosted.org/packages/3e/49/bc728a7fe7d0e9336e2b78f0958a2d6b288ba89f25a1762407a222bf53c3/yarl-1.20.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7d7f497126d65e2cad8dc5f97d34c27b19199b6414a40cb36b52f41b79014be", size = 329361 },
+ { url = "https://files.pythonhosted.org/packages/93/8f/b811b9d1f617c83c907e7082a76e2b92b655400e61730cd61a1f67178393/yarl-1.20.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:67e708dfb8e78d8a19169818eeb5c7a80717562de9051bf2413aca8e3696bf16", size = 346460 },
+ { url = "https://files.pythonhosted.org/packages/70/fd/af94f04f275f95da2c3b8b5e1d49e3e79f1ed8b6ceb0f1664cbd902773ff/yarl-1.20.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:595c07bc79af2494365cc96ddeb772f76272364ef7c80fb892ef9d0649586513", size = 334486 },
+ { url = "https://files.pythonhosted.org/packages/84/65/04c62e82704e7dd0a9b3f61dbaa8447f8507655fd16c51da0637b39b2910/yarl-1.20.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7bdd2f80f4a7df852ab9ab49484a4dee8030023aa536df41f2d922fd57bf023f", size = 342219 },
+ { url = "https://files.pythonhosted.org/packages/91/95/459ca62eb958381b342d94ab9a4b6aec1ddec1f7057c487e926f03c06d30/yarl-1.20.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:c03bfebc4ae8d862f853a9757199677ab74ec25424d0ebd68a0027e9c639a390", size = 350693 },
+ { url = "https://files.pythonhosted.org/packages/a6/00/d393e82dd955ad20617abc546a8f1aee40534d599ff555ea053d0ec9bf03/yarl-1.20.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:344d1103e9c1523f32a5ed704d576172d2cabed3122ea90b1d4e11fe17c66458", size = 355803 },
+ { url = "https://files.pythonhosted.org/packages/9e/ed/c5fb04869b99b717985e244fd93029c7a8e8febdfcffa06093e32d7d44e7/yarl-1.20.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:88cab98aa4e13e1ade8c141daeedd300a4603b7132819c484841bb7af3edce9e", size = 341709 },
+ { url = "https://files.pythonhosted.org/packages/24/fd/725b8e73ac2a50e78a4534ac43c6addf5c1c2d65380dd48a9169cc6739a9/yarl-1.20.1-cp313-cp313t-win32.whl", hash = "sha256:b121ff6a7cbd4abc28985b6028235491941b9fe8fe226e6fdc539c977ea1739d", size = 86591 },
+ { url = "https://files.pythonhosted.org/packages/94/c3/b2e9f38bc3e11191981d57ea08cab2166e74ea770024a646617c9cddd9f6/yarl-1.20.1-cp313-cp313t-win_amd64.whl", hash = "sha256:541d050a355bbbc27e55d906bc91cb6fe42f96c01413dd0f4ed5a5240513874f", size = 93003 },
+ { url = "https://files.pythonhosted.org/packages/b4/2d/2345fce04cfd4bee161bf1e7d9cdc702e3e16109021035dbb24db654a622/yarl-1.20.1-py3-none-any.whl", hash = "sha256:83b8eb083fe4683c6115795d9fc1cfaf2cbbefb19b3a1cb68f6527460f483a77", size = 46542 },
+]
+
+[[package]]
+name = "zipp"
+version = "3.23.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276 },
+]